[docs] Migrate docs from AsciiDoc to Markdown (#2635)
* delete asciidoc files
* add migrated files
* Apply suggestions from review
* add the new ci checks (#2634)

---------

Co-authored-by: Marci W <333176+marciw@users.noreply.github.com>
Co-authored-by: Josh Mock <josh@joshmock.com>
Changed files: .github/workflows/npm-publish.yml (vendored, 35 changes)
@@ -23,38 +23,19 @@ jobs:
- run: npm install -g npm
- run: npm install
- run: npm test
- name: npm publish
run: |
version=$(jq -r .version package.json)
tag_meta=$(echo "$version" | cut -s -d '-' -f2)
if [[ -z "$tag_meta" ]]; then
npm publish --provenance --access public
else
tag=$(echo "$tag_meta" | cut -d '.' -f1)
npm publish --provenance --access public --tag "$tag"
fi
- run: npm publish --provenance --access public --tag alpha
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Publish version on GitHub
run: |
version=$(jq -r .version package.json)
tag_meta=$(echo "$version" | cut -s -d '-' -f2)
if [[ -z "$tag_meta" ]]; then
gh release create \
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
--target "$BRANCH_NAME" \
--title "v$version" \
"v$version"
else
tag_main=$(echo "$version" | cut -d '-' -f1)
gh release create \
-n "This is a $tag_main pre-release. Changes may not be stable." \
--latest=false \
--prerelease \
--target "$BRANCH_NAME" \
--title "v$version" \
"v$version"
fi
gh release create \
-n "This is a 9.0.0 pre-release alpha. Changes may not be stable." \
--latest=false \
--prerelease \
--target "$BRANCH_NAME" \
--title "v$version" \
"v$version"
env:
BRANCH_NAME: ${{ github.event.inputs.branch }}
GH_TOKEN: ${{ github.token }}
@@ -1,269 +0,0 @@
[[basic-config]]
=== Basic configuration

This page shows the basic configuration options that the client offers.


[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  maxRetries: 5,
  sniffOnStart: true
})
----


[cols=2*]
|===
|`node` or `nodes`
a|The Elasticsearch endpoint to use. +
It can be a single string or an array of strings:
[source,js]
----
node: 'http://localhost:9200'
----
Or it can be an object (or an array of objects) that represents the node:
[source,js]
----
node: {
  url: new URL('http://localhost:9200'),
  tls: 'tls options',
  agent: 'http agent options',
  id: 'custom node id',
  headers: { 'custom': 'headers' },
  roles: {
    master: true,
    data: true,
    ingest: true,
    ml: false
  }
}
----
|
||||
|
||||
|`auth`
|
||||
a|Your authentication data. You can use both basic authentication and
|
||||
{ref}/security-api-create-api-key.html[ApiKey]. +
|
||||
See <<authentication,Authentication>> for more details. +
|
||||
_Default:_ `null`
|
||||
|
||||
Basic authentication:
|
||||
[source,js]
|
||||
----
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
----
|
||||
{ref}/security-api-create-api-key.html[ApiKey] authentication:
|
||||
[source,js]
|
||||
----
|
||||
auth: {
|
||||
apiKey: 'base64EncodedKey'
|
||||
}
|
||||
----
|
||||
Bearer authentication, useful for https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html[service account tokens]. Be aware that it does not handle automatic token refresh:
|
||||
[source,js]
|
||||
----
|
||||
auth: {
|
||||
bearer: 'token'
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
|`maxRetries`
|
||||
|`number` - Max number of retries for each request. +
|
||||
_Default:_ `3`
|
||||
|
||||
|`requestTimeout`
|
||||
|`number` - Max request timeout in milliseconds for each request. +
|
||||
_Default:_ No value
|
||||
|
||||
|`pingTimeout`
|
||||
|`number` - Max ping request timeout in milliseconds for each request. +
|
||||
_Default:_ `3000`
|
||||
|
||||
|`sniffInterval`
|`number, boolean` - Perform a sniff operation every `n` milliseconds. Sniffing might not be the best solution for you; read https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[this blog post] to learn more. +
_Default:_ `false`

|`sniffOnStart`
|`boolean` - Perform a sniff once the client is started. Sniffing might not be the best solution for you; read https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[this blog post] to learn more. +
_Default:_ `false`

|`sniffEndpoint`
|`string` - Endpoint to ping during a sniff. +
_Default:_ `'_nodes/_all/http'`

|`sniffOnConnectionFault`
|`boolean` - Perform a sniff on connection fault. Sniffing might not be the best solution for you; read https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[this blog post] to learn more. +
_Default:_ `false`
|
||||
|
||||
|`resurrectStrategy`
|
||||
|`string` - Configure the node resurrection strategy. +
|
||||
_Options:_ `'ping'`, `'optimistic'`, `'none'` +
|
||||
_Default:_ `'ping'`
|
||||
|
||||
|`suggestCompression`
|
||||
|`boolean` - Adds `accept-encoding` header to every request. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`compression`
|
||||
|`string, boolean` - Enables gzip request body compression. +
|
||||
_Options:_ `'gzip'`, `false` +
|
||||
_Default:_ `false`
|
||||
|
||||
|`tls`
|`http.SecureContextOptions` - tls https://nodejs.org/api/tls.html[configuration]. +
_Default:_ `null`
|
||||
|
||||
|`proxy`
|
||||
a|`string, URL` - If you are using an http(s) proxy, you can put its url here.
|
||||
The client will automatically handle the connection to it. +
|
||||
_Default:_ `null`
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
proxy: 'http://localhost:8080'
|
||||
})
|
||||
|
||||
// Proxy with basic authentication
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
proxy: 'http://user:pwd@localhost:8080'
|
||||
})
|
||||
----
|
||||
|
||||
|`agent`
a|`http.AgentOptions, function` - http agent https://nodejs.org/api/http.html#http_new_agent_options[options],
or a function that returns an actual http agent instance. If you want to disable the use of the http agent entirely
(and disable the `keep-alive` feature), set the agent to `false`. +
_Default:_ `null`
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
agent: { agent: 'options' }
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// the function takes as parameter the option
|
||||
// object passed to the Connection constructor
|
||||
agent: (opts) => new CustomAgent()
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// Disable agent and keep-alive
|
||||
agent: false
|
||||
})
|
||||
----
|
||||
|
||||
|`nodeFilter`
|
||||
a|`function` - Filters which node not to use for a request. +
|
||||
_Default:_
|
||||
[source,js]
|
||||
----
|
||||
function defaultNodeFilter (node) {
|
||||
// avoid master only nodes
|
||||
if (node.roles.master === true &&
|
||||
node.roles.data === false &&
|
||||
node.roles.ingest === false) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
----
|
||||
|
||||
|`nodeSelector`
|
||||
a|`function` - custom selection strategy. +
|
||||
_Options:_ `'round-robin'`, `'random'`, custom function +
|
||||
_Default:_ `'round-robin'` +
|
||||
_Custom function example:_
|
||||
[source,js]
|
||||
----
|
||||
function nodeSelector (connections) {
|
||||
const index = calculateIndex()
|
||||
return connections[index]
|
||||
}
|
||||
----
|
||||
|
||||
|`generateRequestId`
a|`function` - Function to generate the request ID for every request. It takes
two parameters: the request parameters and the request options. +
By default it generates an incremental integer for every request. +
_Custom function example:_
[source,js]
----
function generateRequestId (params, options) {
  // your id generation logic
  // must be synchronous
  return 'id'
}
----
|
||||
|
||||
|`name`
|
||||
|`string, symbol` - The name to identify the client instance in the events. +
|
||||
_Default:_ `elasticsearch-js`
|
||||
|
||||
|`opaqueIdPrefix`
|`string` - A string that will be used to prefix any `X-Opaque-Id` header. +
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html#_x-opaque-id_support[`X-Opaque-Id` support] for more details. +
_Default:_ `null`
|
||||
|
||||
|`headers`
|
||||
|`object` - A set of custom headers to send in every request. +
|
||||
_Default:_ `{}`
|
||||
|
||||
|`context`
|
||||
|`object` - A custom object that you can use for observability in your events.
|
||||
It will be merged with the API level context option. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`enableMetaHeader`
|`boolean` - If true, adds a header named `'x-elastic-client-meta'` containing some minimal telemetry data,
such as the client and platform version. +
_Default:_ `true`
|
||||
|
||||
|`cloud`
|
||||
a|`object` - Custom configuration for connecting to
|
||||
https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
for more details. +
|
||||
_Default:_ `null` +
|
||||
_Cloud configuration example:_
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|`disablePrototypePoisoningProtection`
|
||||
|`boolean`, `'proto'`, `'constructor'` - The client can protect you against prototype poisoning attacks. Read https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08[this article] to learn more about this security concern. If needed, you can enable prototype poisoning protection entirely (`false`) or one of the two checks (`'proto'` or `'constructor'`). For performance reasons, it is disabled by default. Read the `secure-json-parse` https://github.com/fastify/secure-json-parse[documentation] to learn more. +
|
||||
_Default:_ `true`
|
||||
|
||||
|`caFingerprint`
|
||||
|`string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxResponseSize`
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it's higher, it aborts the request. It cannot be higher than `buffer.constants.MAX_STRING_LENGTH`. +
_Default:_ `null`

|`maxCompressedResponseSize`
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it's higher, it aborts the request. It cannot be higher than `buffer.constants.MAX_LENGTH`. +
_Default:_ `null`
|
||||
|
||||
|===
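
For reference, here is a minimal sketch that combines several of the options described in the table above. The option names come from the table; the specific values are illustrative assumptions, not recommendations.

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'https://localhost:9200',
  auth: { apiKey: 'base64EncodedKey' },
  maxRetries: 5,                        // retry each failed request up to 5 times
  requestTimeout: 60000,                // per-request timeout in milliseconds
  compression: 'gzip',                  // gzip request bodies
  suggestCompression: true,             // ask Elasticsearch to compress responses
  maxResponseSize: 100 * 1024 * 1024,   // abort if the uncompressed response exceeds ~100 MB
  name: 'my-app-client'                 // name used to identify this client in events
})
----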
@@ -1,966 +0,0 @@
[[changelog-client]]
|
||||
== Release notes
|
||||
|
||||
[discrete]
|
||||
=== 9.0.0
|
||||
|
||||
[discrete]
|
||||
==== Breaking changes
|
||||
|
||||
[discrete]
|
||||
===== Drop support for deprecated `body` parameter
|
||||
|
||||
In 8.0, the top-level `body` parameter that was available on all API functions <<remove-body-key,was deprecated>>. In 9.0 this property is completely removed.
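
A minimal before/after sketch for the `search` endpoint (the same migration shown in the <<remove-body-key,8.0.0 notes>> below):

[source,js]
----
// 8.x (deprecated): request fields wrapped in a body key
await client.search({ index: 'test', body: { query: { match_all: {} } } })

// 9.0: pass the request fields at the top level
await client.search({ index: 'test', query: { match_all: {} } })
----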
|
||||
|
||||
[discrete]
|
||||
===== Remove the default 30-second timeout on all requests sent to Elasticsearch
|
||||
|
||||
Setting HTTP timeouts on Elasticsearch requests goes against Elastic's recommendations. See <<timeout-best-practices>> for more information.
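
If you still want a per-request timeout after upgrading, a minimal sketch using the `requestTimeout` option documented in <<basic-config>> looks like this (the 30-second value is only an example):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'https://localhost:9200',
  auth: { apiKey: 'base64EncodedKey' },
  // opt back in to a 30-second timeout for every request
  requestTimeout: 30000
})
----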
|
||||
|
||||
[discrete]
|
||||
=== 8.17.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.17`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
===== Report correct transport connection type in telemetry
|
||||
|
||||
The client's telemetry reporting mechanism was incorrectly reporting all traffic as using `HttpConnection` when the default is `UndiciConnection`. https://github.com/elastic/elasticsearch-js/issues/2324[#2324]
|
||||
|
||||
[discrete]
|
||||
=== 8.17.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.17`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.17/release-notes-8.17.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.16.4
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.16`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
===== Report correct transport connection type in telemetry
|
||||
|
||||
The client's telemetry reporting mechanism was incorrectly reporting all traffic as using `HttpConnection` when the default is `UndiciConnection`. https://github.com/elastic/elasticsearch-js/issues/2324[#2324]
|
||||
|
||||
[discrete]
|
||||
=== 8.16.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.16`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.16.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.16`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
===== Drop testing artifacts from npm package
|
||||
|
||||
Tap, the unit testing tool used by this project, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.
|
||||
|
||||
[discrete]
|
||||
=== 8.16.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix ECMAScript imports
|
||||
|
||||
Fixed package configuration to correctly support native ECMAScript `import` syntax.
|
||||
|
||||
[discrete]
|
||||
=== 8.16.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.16`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.16/release-notes-8.16.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Support Apache Arrow in ES|QL helper
|
||||
|
||||
The ES|QL helper can now return results as an Apache Arrow `Table` or `RecordBatchReader`, which enables high-performance calculations on ES|QL results, even if the response data is larger than the system's available memory. See <<esql-helper>> for more information.
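
A rough sketch of what this enables; the `toArrowTable` method name is taken from the ES|QL helper documentation and should be treated as an assumption here:

[source,js]
----
// assumes the ES|QL helper API described in <<esql-helper>>
const table = await client.helpers
  .esql({ query: 'FROM sample_data | LIMIT 10' })
  .toArrowTable()

console.log(table.numRows)
----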
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Pass prototype poisoning options to serializer correctly
|
||||
|
||||
The client's `disablePrototypePoisoningProtection` option was set to `true` by default, but when it was set to any other value it was ignored, making it impossible to enable prototype poisoning protection without providing a custom serializer implementation.
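
With this fix, the option documented in <<basic-config>> is honored again. A minimal sketch (the value shown is illustrative):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'https://localhost:9200',
  // `false` enables both checks; `'proto'` or `'constructor'` enables a single check
  disablePrototypePoisoningProtection: false
})
----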
|
||||
|
||||
[discrete]
|
||||
=== 8.15.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.15`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
===== Drop testing artifacts from npm package
|
||||
|
||||
Tap, the unit testing tool, was recently upgraded and started writing to a `.tap` directory. Since tests are run prior to an `npm publish` in CI, this directory was being included in the published package and bloating its size.
|
||||
|
||||
[discrete]
|
||||
=== 8.15.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.15`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.15.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `v8.15`
|
||||
|
||||
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.15.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.15.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.15/release-notes-8.15.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== OpenTelemetry zero-code instrumentation support
|
||||
|
||||
For those that use an observability service that supports OpenTelemetry spans, the client will now automatically generate traces for each Elasticsearch request it makes.
|
||||
See {jsclient}/observability.html#_opentelemetry[the docs]
|
||||
for more information.
|
||||
|
||||
[discrete]
|
||||
=== 8.14.1
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Improved support for Elasticsearch `8.14`
|
||||
|
||||
Updated types based on fixes and changes to the Elasticsearch specification.
|
||||
|
||||
[discrete]
|
||||
=== 8.14.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.14.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.14/release-notes-8.14.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== ES|QL object API helper
|
||||
|
||||
A helper method has been added that parses the response of an ES|QL query and converts it into an array of objects.
|
||||
A TypeScript type parameter can also be provided to improve developer experience when working with the result. https://github.com/elastic/elasticsearch-js/pull/2238[#2238]
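
A minimal sketch, assuming the helper shape described in <<esql-helper>> (`client.helpers.esql(...).toRecords()`) and that the result exposes a `records` array:

[source,js]
----
// each record is a plain object keyed by column name
const { records } = await client.helpers
  .esql({ query: 'FROM sample_data | LIMIT 10' })
  .toRecords()

console.log(records[0])
----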
|
||||
|
||||
[discrete]
|
||||
===== `onSuccess` callback added to bulk helper
|
||||
|
||||
The bulk helper now supports an `onSuccess` callback that will be called for each successful operation. https://github.com/elastic/elasticsearch-js/pull/2199[#2199]
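
A hedged sketch of wiring up the new callback; see the bulk helper documentation for the exact callback signatures, which are treated as assumptions here:

[source,js]
----
await client.helpers.bulk({
  datasource: [{ user: 'foo' }, { user: 'bar' }],
  onDocument () {
    return { index: { _index: 'my-index' } }
  },
  // called once for each operation that succeeds
  onSuccess ({ result, document }) {
    console.log('indexed', document)
  },
  // called once for each operation that is dropped
  onDrop (doc) {
    console.error('dropped', doc)
  }
})
----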
|
||||
|
||||
[discrete]
|
||||
===== Request retries are more polite
|
||||
|
||||
https://github.com/elastic/elastic-transport-js/releases/tag/v8.6.0[`@elastic/transport` v8.6.0] was released, which refactored when and how failed requests are retried. Timed-out requests are no longer retried by default, and retries now use exponential backoff rather than running immediately.
|
||||
|
||||
|
||||
[discrete]
|
||||
=== 8.13.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Pin @elastic/transport to `~8.4.1`
|
||||
|
||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.13 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
v8.13.0 was also released depending on v8.4.0 of `@elastic/transport` instead of v8.4.1, which was unintentional.
|
||||
|
||||
[discrete]
|
||||
=== 8.13.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.13.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.13/release-notes-8.13.0.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Ensure new connections inherit client's set defaults https://github.com/elastic/elasticsearch-js/pull/2159[#2159]
|
||||
|
||||
When instantiating a client, any connection-related defaults (e.g. `requestTimeout`) set on that client instance would not be inherited by nodes if they were entered as strings rather than a `ConnectionOptions` object.
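
A sketch of the affected configuration shape (a node given as a plain string, plus a client-level default):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  // node entered as a string rather than a ConnectionOptions object
  node: 'https://localhost:9200',
  // connection-related defaults such as this one are now inherited by that node
  requestTimeout: 60000
})
----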
|
||||
|
||||
[discrete]
|
||||
=== 8.12.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.4.1`
|
||||
|
||||
Switching from `^8.4.1` to `~8.4.1` ensures 8.12 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Upgrade transport to 8.4.1 https://github.com/elastic/elasticsearch-js/pull/2137[#2137]
|
||||
|
||||
Upgrades `@elastic/transport` to 8.4.1 to resolve https://github.com/elastic/elastic-transport-js/pull/83[a bug] where arrays in error diagnostics were unintentionally transformed into objects.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix hang in bulk helper semaphore https://github.com/elastic/elasticsearch-js/pull/2027[#2027]

The failing state could be reached when a server's response times are slower than the bulk helper's `flushInterval`.
|
||||
|
||||
[discrete]
|
||||
=== 8.12.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.12.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.12/release-notes-8.12.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.11.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.4.0`
|
||||
|
||||
Switching from `^8.4.0` to `~8.4.0` ensures 8.11 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.11.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.11.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.11/release-notes-8.11.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Enhanced support for redacting potentially sensitive data https://github.com/elastic/elasticsearch-js/pull/2095[#2095]
|
||||
|
||||
`@elastic/transport` https://github.com/elastic/elastic-transport-js/releases/tag/v8.4.0[version 8.4.0] introduces enhanced measures for ensuring that request metadata attached to some `Error` objects is redacted. This functionality is primarily to address custom logging solutions that don't use common serialization methods like `JSON.stringify`, `console.log`, or `util.inspect`, which were already accounted for.
|
||||
|
||||
See <<redaction>> for more information.
|
||||
|
||||
[discrete]
|
||||
=== 8.10.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.4`
|
||||
|
||||
Switching from `^8.3.4` to `~8.3.4` ensures 8.10 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.10.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.10.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.10/release-notes-8.10.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.9.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.4`
|
||||
|
||||
Switching from `^8.3.4` to `~8.3.4` ensures 8.9 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.9.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Upgrade Transport https://github.com/elastic/elasticsearch-js/pull/1968[#1968]
|
||||
|
||||
Upgrades `@elastic/transport` to the latest patch release to fix https://github.com/elastic/elastic-transport-js/pull/69[a bug] that could cause the process to exit when handling malformed `HEAD` requests.
|
||||
|
||||
[discrete]
|
||||
=== 8.9.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.9.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.9/release-notes-8.9.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Allow document to be overwritten in `onDocument` iteratee of bulk helper https://github.com/elastic/elasticsearch-js/pull/1732[#1732]
|
||||
|
||||
In the {jsclient}/client-helpers.html#bulk-helper[bulk helper], documents could not be modified before being sent to Elasticsearch. It is now possible to {jsclient}/client-helpers.html#_modifying_a_document_before_operation[modify a document] before sending it.
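
A sketch of the idea, assuming the tuple-return form (`[action, document]`) described in the linked helper docs:

[source,js]
----
await client.helpers.bulk({
  datasource: [{ user: 'foo' }],
  onDocument (doc) {
    // returning [action, document] replaces the document that gets sent
    return [
      { index: { _index: 'my-index' } },
      { ...doc, indexedAt: new Date().toISOString() }
    ]
  }
})
----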
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Updated `user-agent` header https://github.com/elastic/elasticsearch-js/pull/1954[#1954]
|
||||
|
||||
The `user-agent` header the client used to connect to Elasticsearch was using a non-standard format that has been improved.
|
||||
|
||||
[discrete]
|
||||
=== 8.8.2
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.2`
|
||||
|
||||
Switching from `^8.3.2` to `~8.3.2` ensures 8.8 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.8.1
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.8.1`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.1.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix index drift bug in bulk helper https://github.com/elastic/elasticsearch-js/pull/1759[#1759]
|
||||
|
||||
Fixes a bug in the bulk helper that would cause `onDrop` to send back the wrong JSON document or error on a nonexistent document when an error occurred on a bulk HTTP request that contained a `delete` action.
|
||||
|
||||
[discrete]
|
||||
===== Fix a memory leak caused by an outdated version of Undici https://github.com/elastic/elasticsearch-js/pull/1902[#1902]
|
||||
|
||||
Undici 5.5.1, used by https://github.com/elastic/elastic-transport-js[elastic-transport-js], could create a memory leak when a high volume of requests created too many HTTP `abort` listeners. Upgrading Undici to 5.22.1 removed the memory leak.
|
||||
|
||||
[discrete]
|
||||
=== 8.8.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.8.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.0.html[here].
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix type declarations for legacy types with a body key https://github.com/elastic/elasticsearch-js/pull/1784[#1784]
|
||||
|
||||
Prior releases contained a bug where type declarations for legacy types that include a `body` key were not actually importing the type that includes the `body` key.
|
||||
|
||||
[discrete]
|
||||
=== 8.7.3
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.1`
|
||||
|
||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.7.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.7.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.6.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to `~8.3.1`
|
||||
|
||||
Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
|
||||
|
||||
[discrete]
|
||||
=== 8.6.0
|
||||
|
||||
[discrete]
|
||||
===== Bump @elastic/transport to 8.3.1+ https://github.com/elastic/elasticsearch-js/pull/1802[#1802]
|
||||
|
||||
The `@elastic/transport` dependency has been bumped to `~8.3.1` to ensure
|
||||
fixes to the `maxResponseSize` option are available in the client.
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.6.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.6/release-notes-8.6.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.5.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.5.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.5/release-notes-8.5.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.4.0
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.4.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.4/release-notes-8.4.0.html[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.2.1
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.2.1`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.2/release-notes-8.2.1.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Fix ndjson APIs https://github.com/elastic/elasticsearch-js/pull/1688[#1688]
|
||||
|
||||
The previous release contained a bug that broke the ndjson APIs.
We have released `v8.2.0-patch.1` to address this.
This release contains the same fix, and we strongly recommend upgrading to this version.
|
||||
|
||||
[discrete]
|
||||
===== Fix node shutdown apis https://github.com/elastic/elasticsearch-js/pull/1697[#1697]
|
||||
|
||||
The shutdown APIs weren't complete; this fix completes them.
|
||||
|
||||
[discrete]
|
||||
===== Types: move query keys to body https://github.com/elastic/elasticsearch-js/pull/1693[#1693]

The type definitions were wrongly representing the types of fields present in both query and body.
|
||||
|
||||
[discrete]
|
||||
=== 8.2.0
|
||||
|
||||
[discrete]
|
||||
==== Breaking changes
|
||||
|
||||
[discrete]
|
||||
===== Drop Node.js v12 https://github.com/elastic/elasticsearch-js/pull/1670[#1670]
|
||||
|
||||
Node.js v12 support has been dropped, according to our https://github.com/elastic/elasticsearch-js#nodejs-support[Node.js support matrix].
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.2`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.2/release-notes-8.2.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== More lenient parameter checks https://github.com/elastic/elasticsearch-js/pull/1662[#1662]
|
||||
|
||||
When creating a new client, an `undefined` `caFingerprint` no longer triggers an error for an http connection.
|
||||
|
||||
[discrete]
|
||||
===== Update TypeScript docs and export estypes https://github.com/elastic/elasticsearch-js/pull/1675[#1675]
|
||||
|
||||
You can import the full TypeScript requests & responses definitions as follows:
|
||||
[source,ts]
|
||||
----
|
||||
import { estypes } from '@elastic/elasticsearch'
|
||||
----
|
||||
|
||||
If you need the legacy definitions with the body, you can do the following:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
import { estypesWithBody } from '@elastic/elasticsearch'
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Updated hpagent to the latest version https://github.com/elastic/elastic-transport-js/pull/49[transport/#49]
|
||||
|
||||
You can find the related changes https://github.com/delvedor/hpagent/releases/tag/v1.0.0[here].
|
||||
|
||||
[discrete]
|
||||
=== 8.1.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.1`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.1/release-notes-8.1.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Export SniffingTransport https://github.com/elastic/elasticsearch-js/pull/1653[#1653]
|
||||
|
||||
The client now exports the `SniffingTransport` class.
|
||||
|
||||
[discrete]
|
||||
==== Fixes
|
||||
|
||||
[discrete]
|
||||
===== Fix onFlushTimeout timer not being cleared when upstream errors https://github.com/elastic/elasticsearch-js/pull/1616[#1616]
|
||||
|
||||
Fixes a memory leak caused by an error in the upstream dataset of the bulk helper.
|
||||
|
||||
[discrete]
|
||||
===== Cleanup abort listener https://github.com/elastic/elastic-transport-js/pull/42[transport/#42]
|
||||
|
||||
The legacy http client was not cleaning up the abort listener, which could cause a memory leak.
|
||||
|
||||
[discrete]
|
||||
===== Improve undici performances https://github.com/elastic/elastic-transport-js/pull/41[transport/#41]
|
||||
|
||||
Improves stream body collection and the keep-alive timeout.
|
||||
|
||||
[discrete]
|
||||
=== 8.0.0
|
||||
|
||||
[discrete]
|
||||
==== Features
|
||||
|
||||
[discrete]
|
||||
===== Support for Elasticsearch `v8.0`
|
||||
|
||||
You can find all the API changes
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/8.0/release-notes-8.0.0.html[here].
|
||||
|
||||
[discrete]
|
||||
===== Drop old typescript definitions
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Medium*
|
||||
|
||||
The current TypeScript definitions will be removed from the client, and the new definitions, which contain request and response definitions as well will be shipped by default.
|
||||
|
||||
[discrete]
|
||||
===== Drop callback-style API
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Large*
|
||||
|
||||
Maintaining both API styles is not a problem per se, but it makes error handling more convoluted due to async stack traces.
|
||||
Moving to a full-promise API will solve this issue.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// callback-style api
|
||||
client.search({ params }, { options }, (err, result) => {
|
||||
console.log(err || result)
|
||||
})
|
||||
|
||||
// promise-style api
|
||||
client.search({ params }, { options })
|
||||
.then(console.log)
|
||||
.catch(console.log)
|
||||
|
||||
// async-style (sugar syntax on top of promises)
|
||||
const response = await client.search({ params }, { options })
|
||||
console.log(response)
|
||||
----
|
||||
|
||||
If you are already using the promise-style API, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Remove the current abort API and use the new AbortController standard
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The old abort API makes sense for callbacks but it's annoying to use with promises
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// callback-style api
|
||||
const request = client.search({ params }, { options }, (err, result) => {
|
||||
console.log(err) // RequestAbortedError
|
||||
})
|
||||
|
||||
request.abort()
|
||||
|
||||
// promise-style api
|
||||
const promise = client.search({ params }, { options })
|
||||
|
||||
promise
|
||||
.then(console.log)
|
||||
.catch(console.log) // RequestAbortedError
|
||||
|
||||
promise.abort()
|
||||
----
|
||||
|
||||
Node v12 has added the standard https://nodejs.org/api/globals.html#globals_class_abortcontroller[`AbortController`] API which is designed to work well with both callbacks and promises.
|
||||
[source,js]
|
||||
----
|
||||
const ac = new AbortController()
|
||||
client.search({ params }, { signal: ac.signal })
|
||||
.then(console.log)
|
||||
.catch(console.log) // RequestAbortedError
|
||||
|
||||
ac.abort()
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[remove-body-key]]
|
||||
===== Remove the body key from the request
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Thanks to the new types we are developing, we now know exactly where a parameter should go.
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.

This could be a rather big breaking change, so a double solution could be used during the 8.x lifecycle (accepting body keys without them being wrapped in the body, as well as the current solution).
|
||||
|
||||
To convert code from 7.x, you need to remove the `body` parameter in all endpoint requests.
For instance, this is an example for the `search` endpoint:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// to
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Migrate to new separate transport
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
The separated transport has been rewritten in TypeScript and has already dropped the callback style API.
|
||||
Given that it is now separate, most of the Elasticsearch-specific concepts have been removed, and the client will likely need to extend parts of it to reintroduce them.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== The returned value of API calls is the body and not the HTTP related keys
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The client API leaks HTTP-related notions in many places, and removing them would definitely improve the DX.
|
||||
The client will expose a new request-specific option to still get the full response details.
|
||||
|
||||
The new behaviour returns the `body` value directly as the response.
If you want to have the 7.x response format, you need to add `meta: true` to the request options.
|
||||
This will return all the HTTP meta information, including the `body`.
|
||||
|
||||
For instance, this is an example for the `search` endpoint:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(response) // { body: SearchResponse, statusCode: number, headers: object, warnings: array }
|
||||
|
||||
// to
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
})
|
||||
console.log(response) // SearchResponse
|
||||
|
||||
// with a bit of TypeScript and JavaScript magic...
|
||||
const response = await client.search({
|
||||
index: 'test',
|
||||
query: {
|
||||
match_all: {}
|
||||
}
|
||||
}, {
|
||||
meta: true
|
||||
})
|
||||
console.log(response) // { body: SearchResponse, statusCode: number, headers: object, warnings: array }
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Use a weighted connection pool
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
Move from the current cluster connection pool to a weight-based implementation.
|
||||
This new implementation offers better performance and runs less code in the background; the old connection pool can still be used.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Migrate to the "undici" http client
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small to none*
|
||||
|
||||
By default, the HTTP client will no longer be the default Node.js HTTP client, but https://github.com/nodejs/undici[undici] instead.
|
||||
Undici is a brand new HTTP client written from scratch; it offers vastly improved performance and better support for promises.
|
||||
Furthermore, it offers comprehensive and predictable error handling. The old HTTP client can still be used.
|
||||
If you weren't extending the internals of the client, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Drop support for old camelCased keys
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Medium*
|
||||
|
||||
Currently, every path or query parameter could be expressed in both `snake_case` and `camelCase`. Internally the client will convert everything to `snake_case`.
|
||||
This was done in an effort to reduce the friction of migrating from the legacy to the new client, but now it no longer makes sense.
|
||||
If you are already using `snake_case` keys, this won't be a breaking change for you.
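
A small sketch of the change, using the `ignore_unavailable` search parameter as an example:

[source,js]
----
// no longer accepted in v8
await client.search({ index: 'test', ignoreUnavailable: true })

// use the snake_cased key instead
await client.search({ index: 'test', ignore_unavailable: true })
----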
|
||||
|
||||
[discrete]
|
||||
===== Rename `ssl` option to `tls`
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
People usually refer to this as `tls`; furthermore, we use the tls API internally, and Node.js refers to it as tls everywhere.
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
ssl: {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
|
||||
// after
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
tls: {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Remove prototype poisoning protection
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Prototype poisoning protection is very useful, but it can cause performance issues with big payloads.
|
||||
In v8 it will be removed, and the documentation will show how to add it back with a custom serializer.
|
||||
|
||||
[discrete]
|
||||
===== Remove client extensions API
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Large*
|
||||
|
||||
Nowadays the client supports the entire Elasticsearch API, and the `transport.request` method can be used if necessary. The client extensions API no longer has a reason to exist.
|
||||
[source,js]
|
||||
----
|
||||
client.extend('utility.index', ({ makeRequest }) => {
|
||||
return function _index (params, options) {
|
||||
// your code
|
||||
}
|
||||
})
|
||||
|
||||
client.utility.index(...)
|
||||
----
|
||||
|
||||
If you weren't using client extensions, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Move to TypeScript
|
||||
|
||||
*Breaking: No* | *Migration effort: None*
|
||||
|
||||
The new separated transport is already written in TypeScript, and it makes sense that the client v8 will be fully written in TypeScript as well.
|
||||
|
||||
[discrete]
|
||||
===== Move from emitter-like interface to a diagnostic method
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
Currently, the client offers a subset of methods of the `EventEmitter` class; v8 will ship with a `diagnostic` property which will be a proper event emitter.
|
||||
[source,js]
|
||||
----
|
||||
// from
|
||||
client.on('request', console.log)
|
||||
|
||||
// to
|
||||
client.diagnostic.on('request', console.log)
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== Remove username & password properties from Cloud configuration
|
||||
|
||||
*Breaking: Yes* | *Migration effort: Small*
|
||||
|
||||
The Cloud configuration does not support ApiKey and Bearer auth, while the `auth` option does.
There is no need to keep the legacy basic auth support in the cloud configuration.
|
||||
[source,js]
|
||||
----
|
||||
// before
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>',
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
|
||||
// after
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
If you are already passing the basic auth options in the `auth` configuration, this won't be a breaking change for you.
|
||||
|
||||
[discrete]
|
||||
===== Calling `client.close` will reject new requests
|
||||
|
||||
Once you call `client.close`, every new request after that will be rejected with a `NoLivingConnectionsError`. In-flight requests will be executed normally, unless an in-flight request requires a retry, in which case it will be rejected.
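
A small sketch of the new behaviour:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const client = new Client({ node: 'https://localhost:9200' })

await client.close()

try {
  // any request made after close() is rejected
  await client.info()
} catch (err) {
  console.log(err.name) // NoLivingConnectionsError
}
----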
|
||||
|
||||
[discrete]
|
||||
===== Parameters rename
|
||||
|
||||
- `ilm.delete_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `ilm.get_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `ilm.put_lifecycle`: `policy` parameter has been renamed to `name`
|
||||
- `snapshot.cleanup_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.create_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.delete_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.get_repository`: `repository` parameter has been renamed to `name`
|
||||
- `snapshot.verify_repository`: `repository` parameter has been renamed to `name`
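
For instance, the ILM lifecycle calls now take `name` instead of `policy` (a sketch; the list above is the authoritative reference):

[source,js]
----
// 7.x
await client.ilm.deleteLifecycle({ policy: 'my-policy' })

// 8.0
await client.ilm.deleteLifecycle({ name: 'my-policy' })
----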
|
||||
|
||||
[discrete]
|
||||
===== Removal of snake_cased methods
|
||||
|
||||
The v7 client provided snake_cased methods, such as `client.delete_by_query`. This is no longer supported; now only camelCased methods are present.
So `client.delete_by_query` can be accessed with `client.deleteByQuery`.
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
[[child]]
|
||||
=== Creating a child client
|
||||
|
||||
There are some use cases where you may need multiple instances of the client.
|
||||
You can easily do that by calling `new Client()` as many times as you need, but
|
||||
you will lose all the benefits of using one single client, such as the long-lived
connections and the connection pool handling. To avoid this problem, the
|
||||
client offers a `child` API, which returns a new client instance that shares the
|
||||
connection pool with the parent client.
|
||||
|
||||
NOTE: The event emitter is shared between the parent and the child(ren). If you
|
||||
extend the parent client, the child client will have the same extensions, while
|
||||
if the child client adds an extension, the parent client will not be extended.
|
||||
|
||||
You can pass to the `child` every client option you would pass to a normal
client, except the connection pool-specific options (`ssl`, `agent`, `pingTimeout`,
`Connection`, and `resurrectStrategy`).
|
||||
|
||||
CAUTION: If you call `close` in any of the parent/child clients, every client
|
||||
will be closed.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const child = client.child({
|
||||
headers: { 'x-foo': 'bar' },
|
||||
})
|
||||
|
||||
client.info().then(console.log, console.log)
|
||||
child.info().then(console.log, console.log)
|
||||
----
|
||||
@@ -1,12 +0,0 @@
|
||||
[[client-configuration]]
|
||||
== Configuration
|
||||
|
||||
|
||||
The client is designed to be easily configured for your needs. In the following
|
||||
section, you can see the possible options that you can use to configure it.
|
||||
|
||||
* <<basic-config>>
|
||||
* <<advanced-config>>
|
||||
* <<timeout-best-practices>>
|
||||
* <<child>>
|
||||
* <<client-testing>>
|
||||
@@ -1,738 +0,0 @@
|
||||
[[client-connecting]]
|
||||
== Connecting
|
||||
|
||||
This page contains the information you need to connect and use the Client with
|
||||
{es}.
|
||||
|
||||
**On this page**
|
||||
|
||||
* <<authentication, Authentication options>>
|
||||
* <<client-usage, Using the client>>
|
||||
* <<client-faas-env, Using the Client in a Function-as-a-Service Environment>>
|
||||
* <<client-connect-proxy, Connecting through a proxy>>
|
||||
* <<client-error-handling, Handling errors>>
|
||||
* <<keep-alive, Keep-alive connections>>
|
||||
* <<close-connections, Closing a client's connections>>
|
||||
* <<product-check, Automatic product check>>
|
||||
|
||||
[[authentication]]
|
||||
[discrete]
|
||||
=== Authentication
|
||||
|
||||
This document contains code snippets to show you how to connect to various {es}
|
||||
providers.
|
||||
|
||||
|
||||
[discrete]
|
||||
[[auth-ec]]
|
||||
==== Elastic Cloud
|
||||
|
||||
If you are using https://www.elastic.co/cloud[Elastic Cloud], the client offers
|
||||
an easy way to connect to it via the `cloud` option. You must pass the Cloud ID
|
||||
that you can find in the cloud console, then your username and password inside
|
||||
the `auth` option.
|
||||
|
||||
NOTE: When connecting to Elastic Cloud, the client will automatically enable
|
||||
both request and response compression by default, since it yields significant
|
||||
throughput improvements. Moreover, the client will also set the tls option
|
||||
`secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still
override these defaults in your client configuration.
|
||||
|
||||
IMPORTANT: Do not enable sniffing when using Elastic Cloud. Since the nodes are
behind a load balancer, Elastic Cloud will take care of everything for you.
Take a look at https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[this blog post]
to learn more.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[connect-self-managed-new]]
|
||||
=== Connecting to a self-managed cluster
|
||||
|
||||
By default {es} will start with security features like authentication and TLS
|
||||
enabled. To connect to the {es} cluster you'll need to configure the Node.js {es}
|
||||
client to use HTTPS with the generated CA certificate in order to make requests
|
||||
successfully.
|
||||
|
||||
If you're just getting started with {es} we recommend reading the documentation
|
||||
on https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html[configuring]
|
||||
and
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/starting-elasticsearch.html[starting {es}]
|
||||
to ensure your cluster is running as expected.
|
||||
|
||||
When you start {es} for the first time you'll see a distinct block like the one
|
||||
below in the output from {es} (you may have to scroll up if it's been a while):
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
|
||||
-> Elasticsearch security features have been automatically configured!
|
||||
-> Authentication is enabled and cluster connections are encrypted.
|
||||
|
||||
-> Password for the elastic user (reset with `bin/elasticsearch-reset-password -u elastic`):
|
||||
lhQpLELkjkrawaBoaz0Q
|
||||
|
||||
-> HTTP CA certificate SHA-256 fingerprint:
|
||||
a52dd93511e8c6045e21f16654b77c9ee0f34aea26d9f40320b531c474676228
|
||||
...
|
||||
|
||||
----
|
||||
|
||||
Depending on the circumstances there are two options for verifying the HTTPS
|
||||
connection, either verifying with the CA certificate itself or via the HTTP CA
|
||||
certificate fingerprint.
|
||||
|
||||
[discrete]
|
||||
[[auth-tls]]
|
||||
==== TLS configuration
|
||||
|
||||
The generated root CA certificate can be found in the `certs` directory in your
|
||||
{es} config location (`$ES_CONF_PATH/certs/http_ca.crt`). If you're running {es}
|
||||
in Docker there is
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html[additional documentation for retrieving the CA certificate].
|
||||
|
||||
Without any additional configuration you can specify `https://` node urls, and
|
||||
the certificates used to sign these requests will be verified. To turn off
|
||||
certificate verification, you must specify a `tls` object in the top level
|
||||
config and set `rejectUnauthorized: false`. The default `tls` values are the
|
||||
same that Node.js's https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[`tls.connect()`]
|
||||
uses.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
},
|
||||
tls: {
|
||||
ca: fs.readFileSync('./http_ca.crt'),
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-ca-fingerprint]]
|
||||
==== CA fingerprint
|
||||
|
||||
You can configure the client to only trust certificates that are signed by a specific CA certificate
|
||||
(CA certificate pinning) by providing a `caFingerprint` option.
|
||||
This will verify that the fingerprint of the CA certificate that has signed
|
||||
the certificate of the server matches the supplied value.
|
||||
You must configure a SHA256 digest.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://example.com',
auth: { ... },
|
||||
// the fingerprint (SHA256) of the CA certificate that is used to sign
|
||||
// the certificate that the Elasticsearch node presents for TLS.
|
||||
caFingerprint: '20:0D:CA:FA:76:...',
|
||||
tls: {
|
||||
// might be required if it's a self-signed certificate
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
The certificate fingerprint can be calculated using `openssl x509` with the
|
||||
certificate file:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
openssl x509 -fingerprint -sha256 -noout -in /path/to/http_ca.crt
|
||||
----
|
||||
|
||||
If you don't have access to the generated CA file from {es} you can use the
|
||||
following script to output the root CA fingerprint of the {es} instance with
|
||||
`openssl s_client`:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
# Replace the values of 'localhost' and '9200' to the
|
||||
# corresponding host and port values for the cluster.
|
||||
openssl s_client -connect localhost:9200 -servername localhost -showcerts </dev/null 2>/dev/null \
|
||||
| openssl x509 -fingerprint -sha256 -noout -in /dev/stdin
|
||||
----
|
||||
|
||||
The output of `openssl x509` will look something like this:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
SHA256 Fingerprint=A5:2D:D9:35:11:E8:C6:04:5E:21:F1:66:54:B7:7C:9E:E0:F3:4A:EA:26:D9:F4:03:20:B5:31:C4:74:67:62:28
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[connect-no-security]]
|
||||
=== Connecting without security enabled
|
||||
|
||||
WARNING: Running {es} without security enabled is not recommended.
|
||||
|
||||
If your cluster is configured with
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/current/security-settings.html[security explicitly disabled]
|
||||
then you can connect via HTTP:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://example.com'
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-strategies]]
|
||||
=== Authentication strategies
|
||||
|
||||
Below you can find all the supported authentication strategies.
|
||||
|
||||
[discrete]
|
||||
[[auth-apikey]]
|
||||
==== ApiKey authentication
|
||||
|
||||
You can use the
|
||||
{ref-7x}/security-api-create-api-key.html[ApiKey]
|
||||
authentication by passing the `apiKey` parameter via the `auth` option. The
|
||||
`apiKey` parameter can be either a base64 encoded string or an object with the
|
||||
values that you can obtain from the
|
||||
{ref-7x}/security-api-create-api-key.html[create api key endpoint].
|
||||
|
||||
NOTE: If you provide both basic authentication credentials and the ApiKey
|
||||
configuration, the ApiKey takes precedence.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
apiKey: 'base64EncodedKey'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
apiKey: {
|
||||
id: 'foo',
|
||||
api_key: 'bar'
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[auth-bearer]]
|
||||
==== Bearer authentication
|
||||
|
||||
You can provide your credentials by passing the `bearer` token
|
||||
parameter via the `auth` option.
|
||||
Useful for https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html[service account tokens].
|
||||
Be aware that it does not handle automatic token refresh.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
bearer: 'token'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[auth-basic]]
|
||||
==== Basic authentication
|
||||
|
||||
You can provide your credentials by passing the `username` and `password`
|
||||
parameters via the `auth` option.
|
||||
|
||||
NOTE: If you provide both basic authentication credentials and the ApiKey
configuration, the ApiKey takes precedence.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
Otherwise, you can provide your credentials in the node(s) URL.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://username:password@localhost:9200'
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[client-usage]]
|
||||
=== Usage
|
||||
|
||||
Using the client is straightforward: it supports all the public APIs of {es},
|
||||
and every method exposes the same signature.
|
||||
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
The returned value of every API call is the response body from {es}.
|
||||
If you need to access additional metadata, such as the status code or headers,
|
||||
you must specify `meta: true` in the request options:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}, { meta: true })
|
||||
----
|
||||
|
||||
In this case, the result will be:
|
||||
[source,ts]
|
||||
----
|
||||
{
|
||||
body: object | boolean
|
||||
statusCode: number
|
||||
headers: object
|
||||
warnings: string[]
|
||||
meta: object
|
||||
}
|
||||
----
|
||||
|
||||
NOTE: The body is a boolean value when you use `HEAD` APIs.
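
For example, here is a minimal sketch that reuses the `client` from the example above and calls `indices.exists` (a `HEAD` API) with `meta: true`; the index name is only illustrative:

[source,js]
----
// indices.exists is backed by a HEAD request, so the body is a boolean
const { body: indexExists, statusCode } = await client.indices.exists(
  { index: 'my-index' },
  { meta: true }
)
console.log(indexExists, statusCode)
----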
|
||||
|
||||
[discrete]
|
||||
==== Aborting a request
|
||||
|
||||
If needed, you can abort a running request by using the `AbortController` standard.
|
||||
|
||||
CAUTION: If you abort a request, the request will fail with a
|
||||
`RequestAbortedError`.
|
||||
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const AbortController = require('node-abort-controller')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
const abortController = new AbortController()
|
||||
setImmediate(() => abortController.abort())
|
||||
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}, { signal: abortController.signal })
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Request specific options
|
||||
|
||||
If needed, you can pass request-specific options in a second object:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
The supported request specific options are:
|
||||
[cols=2*]
|
||||
|===
|
||||
|`ignore`
|
||||
|`number[]` - HTTP status codes which should not be considered errors for this request. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`requestTimeout`
|
||||
|`number | string | null` - Max request timeout for the request in milliseconds. This overrides the client default, which is to not time out at all. See https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-network.html#_http_client_configuration[Elasticsearch best practices for HTTP clients] for more info. +
|
||||
_Default:_ No timeout
|
||||
|
||||
|`retryOnTimeout`
|
||||
|`boolean` - Retry requests that have timed out. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`maxRetries`
|
||||
|`number` - Max number of retries for the request; it overrides the client default. +
|
||||
_Default:_ `3`
|
||||
|
||||
|`compression`
|
||||
|`string | boolean` - Enables body compression for the request. +
|
||||
_Options:_ `false`, `'gzip'` +
|
||||
_Default:_ `false`
|
||||
|
||||
|`asStream`
|
||||
|`boolean` - Instead of getting the parsed body back, you get the raw Node.js stream of data. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`headers`
|
||||
|`object` - Custom headers for the request. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`querystring`
|
||||
|`object` - Custom querystring for the request. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`id`
|
||||
|`any` - Custom request id. _(overrides the top level request id generator)_ +
|
||||
_Default:_ `null`
|
||||
|
||||
|`context`
|
||||
|`any` - Custom object per request. _(you can use it to pass data to the client's events)_ +
|
||||
_Default:_ `null`
|
||||
|
||||
|`opaqueId`
|
||||
|`string` - Set the `X-Opaque-Id` HTTP header. See {ref}/api-conventions.html#x-opaque-id +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxResponseSize`
|
||||
|`number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it's higher, it aborts the request. It cannot be higher than `buffer.constants.MAX_STRING_LENGTH` +
|
||||
_Default:_ `null`
|
||||
|
||||
|`maxCompressedResponseSize`
|
||||
|`number` - When configured, it verifies that the compressed response size is lower than the configured number; if it's higher, it aborts the request. It cannot be higher than `buffer.constants.MAX_LENGTH` +
|
||||
_Default:_ `null`
|
||||
|
||||
|`signal`
|
||||
|`AbortSignal` - The AbortSignal instance to allow request abortion. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`meta`
|
||||
|`boolean` - Rather than returning the body, return an object containing `body`, `statusCode`, `headers` and `meta` keys +
|
||||
_Default_: `false`
|
||||
|
||||
|`redaction`
|
||||
|`object` - Options for redacting potentially sensitive data from error metadata. See <<redaction>>.
|
||||
|
||||
|`retryBackoff`
|
||||
|`(min: number, max: number, attempt: number) => number;` - A function that calculates how long to sleep, in seconds, before the next request retry +
|
||||
_Default:_ A built-in function that uses exponential backoff with jitter.
|
||||
|
||||
|===
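
As a quick illustration, here is a sketch that combines a few of these options on a single request; all values are only placeholders:

[source,js]
----
const result = await client.search({
  index: 'my-index',
  query: { match: { hello: 'world' } }
}, {
  // per-request overrides; the values below are illustrative
  requestTimeout: 5000,
  maxRetries: 1,
  opaqueId: 'my-search-task',
  headers: { 'x-request-source': 'docs-example' }
})
----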
|
||||
|
||||
[discrete]
|
||||
[[client-faas-env]]
|
||||
=== Using the Client in a Function-as-a-Service Environment
|
||||
|
||||
This section illustrates the best practices for leveraging the {es} client in a Function-as-a-Service (FaaS) environment.
|
||||
The most influential optimization is to initialize the client outside of the function, in the global scope.
This practice not only improves performance but also enables background functionality such as https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how[sniffing].
|
||||
The following examples provide a skeleton for the best practices.
|
||||
|
||||
[discrete]
|
||||
==== GCP Cloud Functions
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
exports.testFunction = async function (req, res) {
|
||||
// use the client
|
||||
}
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== AWS Lambda
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
exports.handler = async function (event, context) {
|
||||
// use the client
|
||||
}
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Azure Functions
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
module.exports = async function (context, req) {
|
||||
// use the client
|
||||
}
|
||||
----
|
||||
|
||||
Resources used to assess these recommendations:
|
||||
|
||||
- https://cloud.google.com/functions/docs/bestpractices/tips#use_global_variables_to_reuse_objects_in_future_invocations[GCP Cloud Functions: Tips & Tricks]
|
||||
- https://docs.aws.amazon.com/lambda/latest/dg/best-practices.html[Best practices for working with AWS Lambda functions]
|
||||
- https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python?tabs=azurecli-linux%2Capplication-level#global-variables[Azure Functions Python developer guide]
|
||||
- https://docs.aws.amazon.com/lambda/latest/operatorguide/global-scope.html[AWS Lambda: Comparing the effect of global scope]
|
||||
|
||||
|
||||
[discrete]
|
||||
[[client-connect-proxy]]
|
||||
=== Connecting through a proxy
|
||||
|
||||
~Added~ ~in~ ~`v7.10.0`~
|
||||
|
||||
If you need to connect to {es} through an http(s) proxy, the client offers
out-of-the-box configuration to help you with it. Under the
hood, it uses the https://github.com/delvedor/hpagent[`hpagent`] module.
|
||||
|
||||
IMPORTANT: In versions 8.0+ of the client, the default `Connection` type is set to `UndiciConnection`, which does not support proxy configurations.
|
||||
To use a proxy, you will need to use the `HttpConnection` class from `@elastic/transport` instead.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
import { Client } from '@elastic/elasticsearch'
import { HttpConnection } from '@elastic/transport'
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
proxy: 'http://localhost:8080',
|
||||
Connection: HttpConnection,
|
||||
})
|
||||
----
|
||||
|
||||
Basic authentication is supported as well:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
proxy: 'http://user:pwd@localhost:8080',
|
||||
Connection: HttpConnection,
|
||||
})
|
||||
----
|
||||
|
||||
If you are connecting through a non-http(s) proxy, such as a `socks5` or `pac`,
|
||||
you can use the `agent` option to configure it.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const SocksProxyAgent = require('socks-proxy-agent')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
agent () {
|
||||
return new SocksProxyAgent('socks://127.0.0.1:1080')
|
||||
},
|
||||
Connection: HttpConnection,
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[client-error-handling]]
|
||||
=== Error handling
|
||||
|
||||
The client exposes a variety of error objects that you can use to enhance your
|
||||
error handling. You can find all the error objects inside the `errors` key in
|
||||
the client.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { errors } = require('@elastic/elasticsearch')
|
||||
console.log(errors)
|
||||
----
|
||||
|
||||
|
||||
You can find the errors exported by the client in the table below.
|
||||
|
||||
[cols=3*]
|
||||
|===
|
||||
|*Error*
|
||||
|*Description*
|
||||
|*Properties*
|
||||
|
||||
|`ElasticsearchClientError`
|
||||
|Every error inherits from this class; it is the basic error generated by the client.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
|
||||
|`TimeoutError`
|
||||
|Generated when a request exceeds the `requestTimeout` option.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
* `meta` - `object`, contains all the information about the request
|
||||
|
||||
|`ConnectionError`
|
||||
|Generated when an error occurs during the request; it can be a connection error or a malformed stream of data.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
* `meta` - `object`, contains all the information about the request
|
||||
|
||||
|`RequestAbortedError`
|
||||
|Generated if the user calls the `request.abort()` method.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
* `meta` - `object`, contains all the information about the request
|
||||
|
||||
|`NoLivingConnectionsError`
|
||||
|Given the configuration, the ConnectionPool was not able to find a usable Connection for this request.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
* `meta` - `object`, contains all the information about the request
|
||||
|
||||
|`SerializationError`
|
||||
|Generated if the serialization fails.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
* `data` - `object`, the object to serialize
|
||||
|
||||
|`DeserializationError`
|
||||
|Generated if the deserialization fails.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
* `data` - `string`, the string to deserialize
|
||||
|
||||
|`ConfigurationError`
|
||||
|Generated if there is a malformed configuration or parameter.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
|
||||
|`ResponseError`
|
||||
|Generated in case of a `4xx` or `5xx` response.
|
||||
a|* `name` - `string`
|
||||
* `message` - `string`
|
||||
* `meta` - `object`, contains all the information about the request
|
||||
* `body` - `object`, the response body
|
||||
* `statusCode` - `number`, the response status code
* `headers` - `object`, the response headers
|
||||
|===
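
For example, here is a minimal sketch that uses these classes to branch on different failure types; the index and document id are only illustrative:

[source,js]
----
const { Client, errors } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

try {
  await client.get({ index: 'my-index', id: 'does-not-exist' })
} catch (err) {
  if (err instanceof errors.ResponseError) {
    // 4xx/5xx response from Elasticsearch: inspect status code and body
    console.log(err.statusCode, err.body)
  } else if (err instanceof errors.ConnectionError) {
    // network-level failure
    console.log('connection problem:', err.message)
  } else {
    throw err
  }
}
----

Alternatively, you can pass `ignore: [404]` in the request options if a missing document should not be treated as an error.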
|
||||
|
||||
[[keep-alive]]
|
||||
[discrete]
|
||||
=== Keep-alive connections
|
||||
|
||||
By default, the client uses persistent, keep-alive connections to reduce the overhead of creating a new HTTP connection for each Elasticsearch request.
|
||||
If you are using the default `UndiciConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 10 minutes.
|
||||
If you are using the legacy `HttpConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 1 minute.
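
If you want to tune keep-alive behavior rather than disable it, one option is to stay on the legacy `HttpConnection` class and pass standard Node.js `http.Agent` options via the `agent` option. This is only a sketch, assuming an `http.Agent`-compatible options object; the values are illustrative:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')
const { HttpConnection } = require('@elastic/transport')

const client = new Client({
  node: 'http://localhost:9200',
  Connection: HttpConnection,
  // standard Node.js http.Agent options
  agent: {
    keepAlive: true,
    keepAliveMsecs: 60000,
    maxSockets: 256
  }
})
----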
|
||||
|
||||
If you need to disable keep-alive connections, you can override the HTTP agent with your preferred https://nodejs.org/api/http.html#http_new_agent_options[HTTP agent options]:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// the function takes as a parameter the options
// object passed to the Connection constructor
|
||||
agent: (opts) => new CustomAgent()
|
||||
})
|
||||
----
|
||||
|
||||
Or you can disable the HTTP agent entirely:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// Disable agent and keep-alive
|
||||
agent: false
|
||||
})
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[close-connections]]
|
||||
=== Closing a client's connections
|
||||
|
||||
If you would like to close all open connections being managed by an instance of the client, use the `close()` function:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
});
|
||||
client.close();
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[product-check]]
|
||||
=== Automatic product check
|
||||
|
||||
Since v7.14.0, the client performs a required product check before the first call.
|
||||
This pre-flight product check allows the client to establish the version of Elasticsearch
|
||||
that it is communicating with. The product check requires one additional HTTP request to
|
||||
be sent to the server as part of the request pipeline before the main API call is sent.
|
||||
In most cases, this will succeed during the very first API call that the client sends.
|
||||
Once the product check completes, no further product check HTTP requests are sent for
|
||||
subsequent API calls.
|
||||
488
docs/docset.yml
Normal file
@ -0,0 +1,488 @@
|
||||
project: 'Node.js client'
|
||||
exclude:
|
||||
- examples/proxy/README.md
|
||||
cross_links:
|
||||
- elasticsearch
|
||||
toc:
|
||||
- toc: reference
|
||||
- toc: release-notes
|
||||
subs:
|
||||
ref: "https://www.elastic.co/guide/en/elasticsearch/reference/current"
|
||||
ref-bare: "https://www.elastic.co/guide/en/elasticsearch/reference"
|
||||
ref-8x: "https://www.elastic.co/guide/en/elasticsearch/reference/8.1"
|
||||
ref-80: "https://www.elastic.co/guide/en/elasticsearch/reference/8.0"
|
||||
ref-7x: "https://www.elastic.co/guide/en/elasticsearch/reference/7.17"
|
||||
ref-70: "https://www.elastic.co/guide/en/elasticsearch/reference/7.0"
|
||||
ref-60: "https://www.elastic.co/guide/en/elasticsearch/reference/6.0"
|
||||
ref-64: "https://www.elastic.co/guide/en/elasticsearch/reference/6.4"
|
||||
xpack-ref: "https://www.elastic.co/guide/en/x-pack/6.2"
|
||||
logstash-ref: "https://www.elastic.co/guide/en/logstash/current"
|
||||
kibana-ref: "https://www.elastic.co/guide/en/kibana/current"
|
||||
kibana-ref-all: "https://www.elastic.co/guide/en/kibana"
|
||||
beats-ref-root: "https://www.elastic.co/guide/en/beats"
|
||||
beats-ref: "https://www.elastic.co/guide/en/beats/libbeat/current"
|
||||
beats-ref-60: "https://www.elastic.co/guide/en/beats/libbeat/6.0"
|
||||
beats-ref-63: "https://www.elastic.co/guide/en/beats/libbeat/6.3"
|
||||
beats-devguide: "https://www.elastic.co/guide/en/beats/devguide/current"
|
||||
auditbeat-ref: "https://www.elastic.co/guide/en/beats/auditbeat/current"
|
||||
packetbeat-ref: "https://www.elastic.co/guide/en/beats/packetbeat/current"
|
||||
metricbeat-ref: "https://www.elastic.co/guide/en/beats/metricbeat/current"
|
||||
filebeat-ref: "https://www.elastic.co/guide/en/beats/filebeat/current"
|
||||
functionbeat-ref: "https://www.elastic.co/guide/en/beats/functionbeat/current"
|
||||
winlogbeat-ref: "https://www.elastic.co/guide/en/beats/winlogbeat/current"
|
||||
heartbeat-ref: "https://www.elastic.co/guide/en/beats/heartbeat/current"
|
||||
journalbeat-ref: "https://www.elastic.co/guide/en/beats/journalbeat/current"
|
||||
ingest-guide: "https://www.elastic.co/guide/en/ingest/current"
|
||||
fleet-guide: "https://www.elastic.co/guide/en/fleet/current"
|
||||
apm-guide-ref: "https://www.elastic.co/guide/en/apm/guide/current"
|
||||
apm-guide-7x: "https://www.elastic.co/guide/en/apm/guide/7.17"
|
||||
apm-app-ref: "https://www.elastic.co/guide/en/kibana/current"
|
||||
apm-agents-ref: "https://www.elastic.co/guide/en/apm/agent"
|
||||
apm-android-ref: "https://www.elastic.co/guide/en/apm/agent/android/current"
|
||||
apm-py-ref: "https://www.elastic.co/guide/en/apm/agent/python/current"
|
||||
apm-py-ref-3x: "https://www.elastic.co/guide/en/apm/agent/python/3.x"
|
||||
apm-node-ref-index: "https://www.elastic.co/guide/en/apm/agent/nodejs"
|
||||
apm-node-ref: "https://www.elastic.co/guide/en/apm/agent/nodejs/current"
|
||||
apm-node-ref-1x: "https://www.elastic.co/guide/en/apm/agent/nodejs/1.x"
|
||||
apm-rum-ref: "https://www.elastic.co/guide/en/apm/agent/rum-js/current"
|
||||
apm-ruby-ref: "https://www.elastic.co/guide/en/apm/agent/ruby/current"
|
||||
apm-java-ref: "https://www.elastic.co/guide/en/apm/agent/java/current"
|
||||
apm-go-ref: "https://www.elastic.co/guide/en/apm/agent/go/current"
|
||||
apm-dotnet-ref: "https://www.elastic.co/guide/en/apm/agent/dotnet/current"
|
||||
apm-php-ref: "https://www.elastic.co/guide/en/apm/agent/php/current"
|
||||
apm-ios-ref: "https://www.elastic.co/guide/en/apm/agent/swift/current"
|
||||
apm-lambda-ref: "https://www.elastic.co/guide/en/apm/lambda/current"
|
||||
apm-attacher-ref: "https://www.elastic.co/guide/en/apm/attacher/current"
|
||||
docker-logging-ref: "https://www.elastic.co/guide/en/beats/loggingplugin/current"
|
||||
esf-ref: "https://www.elastic.co/guide/en/esf/current"
|
||||
kinesis-firehose-ref: "https://www.elastic.co/guide/en/kinesis/{{kinesis_version}}"
|
||||
estc-welcome-current: "https://www.elastic.co/guide/en/starting-with-the-elasticsearch-platform-and-its-solutions/current"
|
||||
estc-welcome: "https://www.elastic.co/guide/en/starting-with-the-elasticsearch-platform-and-its-solutions/current"
|
||||
estc-welcome-all: "https://www.elastic.co/guide/en/starting-with-the-elasticsearch-platform-and-its-solutions"
|
||||
hadoop-ref: "https://www.elastic.co/guide/en/elasticsearch/hadoop/current"
|
||||
stack-ref: "https://www.elastic.co/guide/en/elastic-stack/current"
|
||||
stack-ref-67: "https://www.elastic.co/guide/en/elastic-stack/6.7"
|
||||
stack-ref-68: "https://www.elastic.co/guide/en/elastic-stack/6.8"
|
||||
stack-ref-70: "https://www.elastic.co/guide/en/elastic-stack/7.0"
|
||||
stack-ref-80: "https://www.elastic.co/guide/en/elastic-stack/8.0"
|
||||
stack-ov: "https://www.elastic.co/guide/en/elastic-stack-overview/current"
|
||||
stack-gs: "https://www.elastic.co/guide/en/elastic-stack-get-started/current"
|
||||
stack-gs-current: "https://www.elastic.co/guide/en/elastic-stack-get-started/current"
|
||||
javaclient: "https://www.elastic.co/guide/en/elasticsearch/client/java-api/current"
|
||||
java-api-client: "https://www.elastic.co/guide/en/elasticsearch/client/java-api-client/current"
|
||||
java-rest: "https://www.elastic.co/guide/en/elasticsearch/client/java-rest/current"
|
||||
jsclient: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current"
|
||||
jsclient-current: "https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current"
|
||||
es-ruby-client: "https://www.elastic.co/guide/en/elasticsearch/client/ruby-api/current"
|
||||
es-dotnet-client: "https://www.elastic.co/guide/en/elasticsearch/client/net-api/current"
|
||||
es-php-client: "https://www.elastic.co/guide/en/elasticsearch/client/php-api/current"
|
||||
es-python-client: "https://www.elastic.co/guide/en/elasticsearch/client/python-api/current"
|
||||
defguide: "https://www.elastic.co/guide/en/elasticsearch/guide/2.x"
|
||||
painless: "https://www.elastic.co/guide/en/elasticsearch/painless/current"
|
||||
plugins: "https://www.elastic.co/guide/en/elasticsearch/plugins/current"
|
||||
plugins-8x: "https://www.elastic.co/guide/en/elasticsearch/plugins/8.1"
|
||||
plugins-7x: "https://www.elastic.co/guide/en/elasticsearch/plugins/7.17"
|
||||
plugins-6x: "https://www.elastic.co/guide/en/elasticsearch/plugins/6.8"
|
||||
glossary: "https://www.elastic.co/guide/en/elastic-stack-glossary/current"
|
||||
upgrade_guide: "https://www.elastic.co/products/upgrade_guide"
|
||||
blog-ref: "https://www.elastic.co/blog/"
|
||||
curator-ref: "https://www.elastic.co/guide/en/elasticsearch/client/curator/current"
|
||||
curator-ref-current: "https://www.elastic.co/guide/en/elasticsearch/client/curator/current"
|
||||
metrics-ref: "https://www.elastic.co/guide/en/metrics/current"
|
||||
metrics-guide: "https://www.elastic.co/guide/en/metrics/guide/current"
|
||||
logs-ref: "https://www.elastic.co/guide/en/logs/current"
|
||||
logs-guide: "https://www.elastic.co/guide/en/logs/guide/current"
|
||||
uptime-guide: "https://www.elastic.co/guide/en/uptime/current"
|
||||
observability-guide: "https://www.elastic.co/guide/en/observability/current"
|
||||
observability-guide-all: "https://www.elastic.co/guide/en/observability"
|
||||
siem-guide: "https://www.elastic.co/guide/en/siem/guide/current"
|
||||
security-guide: "https://www.elastic.co/guide/en/security/current"
|
||||
security-guide-all: "https://www.elastic.co/guide/en/security"
|
||||
endpoint-guide: "https://www.elastic.co/guide/en/endpoint/current"
|
||||
sql-odbc: "https://www.elastic.co/guide/en/elasticsearch/sql-odbc/current"
|
||||
ecs-ref: "https://www.elastic.co/guide/en/ecs/current"
|
||||
ecs-logging-ref: "https://www.elastic.co/guide/en/ecs-logging/overview/current"
|
||||
ecs-logging-go-logrus-ref: "https://www.elastic.co/guide/en/ecs-logging/go-logrus/current"
|
||||
ecs-logging-go-zap-ref: "https://www.elastic.co/guide/en/ecs-logging/go-zap/current"
|
||||
ecs-logging-go-zerolog-ref: "https://www.elastic.co/guide/en/ecs-logging/go-zap/current"
|
||||
ecs-logging-java-ref: "https://www.elastic.co/guide/en/ecs-logging/java/current"
|
||||
ecs-logging-dotnet-ref: "https://www.elastic.co/guide/en/ecs-logging/dotnet/current"
|
||||
ecs-logging-nodejs-ref: "https://www.elastic.co/guide/en/ecs-logging/nodejs/current"
|
||||
ecs-logging-php-ref: "https://www.elastic.co/guide/en/ecs-logging/php/current"
|
||||
ecs-logging-python-ref: "https://www.elastic.co/guide/en/ecs-logging/python/current"
|
||||
ecs-logging-ruby-ref: "https://www.elastic.co/guide/en/ecs-logging/ruby/current"
|
||||
ml-docs: "https://www.elastic.co/guide/en/machine-learning/current"
|
||||
eland-docs: "https://www.elastic.co/guide/en/elasticsearch/client/eland/current"
|
||||
eql-ref: "https://eql.readthedocs.io/en/latest/query-guide"
|
||||
extendtrial: "https://www.elastic.co/trialextension"
|
||||
wikipedia: "https://en.wikipedia.org/wiki"
|
||||
forum: "https://discuss.elastic.co/"
|
||||
xpack-forum: "https://discuss.elastic.co/c/50-x-pack"
|
||||
security-forum: "https://discuss.elastic.co/c/x-pack/shield"
|
||||
watcher-forum: "https://discuss.elastic.co/c/x-pack/watcher"
|
||||
monitoring-forum: "https://discuss.elastic.co/c/x-pack/marvel"
|
||||
graph-forum: "https://discuss.elastic.co/c/x-pack/graph"
|
||||
apm-forum: "https://discuss.elastic.co/c/apm"
|
||||
enterprise-search-ref: "https://www.elastic.co/guide/en/enterprise-search/current"
|
||||
app-search-ref: "https://www.elastic.co/guide/en/app-search/current"
|
||||
workplace-search-ref: "https://www.elastic.co/guide/en/workplace-search/current"
|
||||
enterprise-search-node-ref: "https://www.elastic.co/guide/en/enterprise-search-clients/enterprise-search-node/current"
|
||||
enterprise-search-php-ref: "https://www.elastic.co/guide/en/enterprise-search-clients/php/current"
|
||||
enterprise-search-python-ref: "https://www.elastic.co/guide/en/enterprise-search-clients/python/current"
|
||||
enterprise-search-ruby-ref: "https://www.elastic.co/guide/en/enterprise-search-clients/ruby/current"
|
||||
elastic-maps-service: "https://maps.elastic.co"
|
||||
integrations-docs: "https://docs.elastic.co/en/integrations"
|
||||
integrations-devguide: "https://www.elastic.co/guide/en/integrations-developer/current"
|
||||
time-units: "https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#time-units"
|
||||
byte-units: "https://www.elastic.co/guide/en/elasticsearch/reference/current/api-conventions.html#byte-units"
|
||||
apm-py-ref-v: "https://www.elastic.co/guide/en/apm/agent/python/current"
|
||||
apm-node-ref-v: "https://www.elastic.co/guide/en/apm/agent/nodejs/current"
|
||||
apm-rum-ref-v: "https://www.elastic.co/guide/en/apm/agent/rum-js/current"
|
||||
apm-ruby-ref-v: "https://www.elastic.co/guide/en/apm/agent/ruby/current"
|
||||
apm-java-ref-v: "https://www.elastic.co/guide/en/apm/agent/java/current"
|
||||
apm-go-ref-v: "https://www.elastic.co/guide/en/apm/agent/go/current"
|
||||
apm-ios-ref-v: "https://www.elastic.co/guide/en/apm/agent/swift/current"
|
||||
apm-dotnet-ref-v: "https://www.elastic.co/guide/en/apm/agent/dotnet/current"
|
||||
apm-php-ref-v: "https://www.elastic.co/guide/en/apm/agent/php/current"
|
||||
ecloud: "Elastic Cloud"
|
||||
esf: "Elastic Serverless Forwarder"
|
||||
ess: "Elasticsearch Service"
|
||||
ece: "Elastic Cloud Enterprise"
|
||||
eck: "Elastic Cloud on Kubernetes"
|
||||
serverless-full: "Elastic Cloud Serverless"
|
||||
serverless-short: "Serverless"
|
||||
es-serverless: "Elasticsearch Serverless"
|
||||
es3: "Elasticsearch Serverless"
|
||||
obs-serverless: "Elastic Observability Serverless"
|
||||
sec-serverless: "Elastic Security Serverless"
|
||||
serverless-docs: "https://docs.elastic.co/serverless"
|
||||
cloud: "https://www.elastic.co/guide/en/cloud/current"
|
||||
ess-utm-params: "?page=docs&placement=docs-body"
|
||||
ess-baymax: "?page=docs&placement=docs-body"
|
||||
ess-trial: "https://cloud.elastic.co/registration?page=docs&placement=docs-body"
|
||||
ess-product: "https://www.elastic.co/cloud/elasticsearch-service?page=docs&placement=docs-body"
|
||||
ess-console: "https://cloud.elastic.co?page=docs&placement=docs-body"
|
||||
ess-console-name: "Elasticsearch Service Console"
|
||||
ess-deployments: "https://cloud.elastic.co/deployments?page=docs&placement=docs-body"
|
||||
ece-ref: "https://www.elastic.co/guide/en/cloud-enterprise/current"
|
||||
eck-ref: "https://www.elastic.co/guide/en/cloud-on-k8s/current"
|
||||
ess-leadin: "You can run Elasticsearch on your own hardware or use our hosted Elasticsearch Service that is available on AWS, GCP, and Azure. https://cloud.elastic.co/registration{ess-utm-params}[Try the Elasticsearch Service for free]."
|
||||
ess-leadin-short: "Our hosted Elasticsearch Service is available on AWS, GCP, and Azure, and you can https://cloud.elastic.co/registration{ess-utm-params}[try it for free]."
|
||||
ess-icon: "image:https://doc-icons.s3.us-east-2.amazonaws.com/logo_cloud.svg[link=\"https://cloud.elastic.co/registration{ess-utm-params}\", title=\"Supported on Elasticsearch Service\"]"
|
||||
ece-icon: "image:https://doc-icons.s3.us-east-2.amazonaws.com/logo_cloud_ece.svg[link=\"https://cloud.elastic.co/registration{ess-utm-params}\", title=\"Supported on Elastic Cloud Enterprise\"]"
|
||||
cloud-only: "This feature is designed for indirect use by https://cloud.elastic.co/registration{ess-utm-params}[Elasticsearch Service], https://www.elastic.co/guide/en/cloud-enterprise/{ece-version-link}[Elastic Cloud Enterprise], and https://www.elastic.co/guide/en/cloud-on-k8s/current[Elastic Cloud on Kubernetes]. Direct use is not supported."
|
||||
ess-setting-change: "image:https://doc-icons.s3.us-east-2.amazonaws.com/logo_cloud.svg[link=\"{ess-trial}\", title=\"Supported on {ess}\"] indicates a change to a supported https://www.elastic.co/guide/en/cloud/current/ec-add-user-settings.html[user setting] for Elasticsearch Service."
|
||||
ess-skip-section: "If you use Elasticsearch Service, skip this section. Elasticsearch Service handles these changes for you."
|
||||
api-cloud: "https://www.elastic.co/docs/api/doc/cloud"
|
||||
api-ece: "https://www.elastic.co/docs/api/doc/cloud-enterprise"
|
||||
api-kibana-serverless: "https://www.elastic.co/docs/api/doc/serverless"
|
||||
es-feature-flag: "This feature is in development and not yet available for use. This documentation is provided for informational purposes only."
|
||||
es-ref-dir: "'{{elasticsearch-root}}/docs/reference'"
|
||||
apm-app: "APM app"
|
||||
uptime-app: "Uptime app"
|
||||
synthetics-app: "Synthetics app"
|
||||
logs-app: "Logs app"
|
||||
metrics-app: "Metrics app"
|
||||
infrastructure-app: "Infrastructure app"
|
||||
siem-app: "SIEM app"
|
||||
security-app: "Elastic Security app"
|
||||
ml-app: "Machine Learning"
|
||||
dev-tools-app: "Dev Tools"
|
||||
ingest-manager-app: "Ingest Manager"
|
||||
stack-manage-app: "Stack Management"
|
||||
stack-monitor-app: "Stack Monitoring"
|
||||
alerts-ui: "Alerts and Actions"
|
||||
rules-ui: "Rules"
|
||||
rac-ui: "Rules and Connectors"
|
||||
connectors-ui: "Connectors"
|
||||
connectors-feature: "Actions and Connectors"
|
||||
stack-rules-feature: "Stack Rules"
|
||||
user-experience: "User Experience"
|
||||
ems: "Elastic Maps Service"
|
||||
ems-init: "EMS"
|
||||
hosted-ems: "Elastic Maps Server"
|
||||
ipm-app: "Index Pattern Management"
|
||||
ingest-pipelines: "ingest pipelines"
|
||||
ingest-pipelines-app: "Ingest Pipelines"
|
||||
ingest-pipelines-cap: "Ingest pipelines"
|
||||
ls-pipelines: "Logstash pipelines"
|
||||
ls-pipelines-app: "Logstash Pipelines"
|
||||
maint-windows: "maintenance windows"
|
||||
maint-windows-app: "Maintenance Windows"
|
||||
maint-windows-cap: "Maintenance windows"
|
||||
custom-roles-app: "Custom Roles"
|
||||
data-source: "data view"
|
||||
data-sources: "data views"
|
||||
data-source-caps: "Data View"
|
||||
data-sources-caps: "Data Views"
|
||||
data-source-cap: "Data view"
|
||||
data-sources-cap: "Data views"
|
||||
project-settings: "Project settings"
|
||||
manage-app: "Management"
|
||||
index-manage-app: "Index Management"
|
||||
data-views-app: "Data Views"
|
||||
rules-app: "Rules"
|
||||
saved-objects-app: "Saved Objects"
|
||||
tags-app: "Tags"
|
||||
api-keys-app: "API keys"
|
||||
transforms-app: "Transforms"
|
||||
connectors-app: "Connectors"
|
||||
files-app: "Files"
|
||||
reports-app: "Reports"
|
||||
maps-app: "Maps"
|
||||
alerts-app: "Alerts"
|
||||
crawler: "Enterprise Search web crawler"
|
||||
ents: "Enterprise Search"
|
||||
app-search-crawler: "App Search web crawler"
|
||||
agent: "Elastic Agent"
|
||||
agents: "Elastic Agents"
|
||||
fleet: "Fleet"
|
||||
fleet-server: "Fleet Server"
|
||||
integrations-server: "Integrations Server"
|
||||
ingest-manager: "Ingest Manager"
|
||||
ingest-management: "ingest management"
|
||||
package-manager: "Elastic Package Manager"
|
||||
integrations: "Integrations"
|
||||
package-registry: "Elastic Package Registry"
|
||||
artifact-registry: "Elastic Artifact Registry"
|
||||
aws: "AWS"
|
||||
stack: "Elastic Stack"
|
||||
xpack: "X-Pack"
|
||||
es: "Elasticsearch"
|
||||
kib: "Kibana"
|
||||
esms: "Elastic Stack Monitoring Service"
|
||||
esms-init: "ESMS"
|
||||
ls: "Logstash"
|
||||
beats: "Beats"
|
||||
auditbeat: "Auditbeat"
|
||||
filebeat: "Filebeat"
|
||||
heartbeat: "Heartbeat"
|
||||
metricbeat: "Metricbeat"
|
||||
packetbeat: "Packetbeat"
|
||||
winlogbeat: "Winlogbeat"
|
||||
functionbeat: "Functionbeat"
|
||||
journalbeat: "Journalbeat"
|
||||
es-sql: "Elasticsearch SQL"
|
||||
esql: "ES|QL"
|
||||
elastic-agent: "Elastic Agent"
|
||||
k8s: "Kubernetes"
|
||||
log-driver-long: "Elastic Logging Plugin for Docker"
|
||||
security: "X-Pack security"
|
||||
security-features: "security features"
|
||||
operator-feature: "operator privileges feature"
|
||||
es-security-features: "Elasticsearch security features"
|
||||
stack-security-features: "Elastic Stack security features"
|
||||
endpoint-sec: "Endpoint Security"
|
||||
endpoint-cloud-sec: "Endpoint and Cloud Security"
|
||||
elastic-defend: "Elastic Defend"
|
||||
elastic-sec: "Elastic Security"
|
||||
elastic-endpoint: "Elastic Endpoint"
|
||||
swimlane: "Swimlane"
|
||||
sn: "ServiceNow"
|
||||
sn-itsm: "ServiceNow ITSM"
|
||||
sn-itom: "ServiceNow ITOM"
|
||||
sn-sir: "ServiceNow SecOps"
|
||||
jira: "Jira"
|
||||
ibm-r: "IBM Resilient"
|
||||
webhook: "Webhook"
|
||||
webhook-cm: "Webhook - Case Management"
|
||||
opsgenie: "Opsgenie"
|
||||
bedrock: "Amazon Bedrock"
|
||||
gemini: "Google Gemini"
|
||||
hive: "TheHive"
|
||||
monitoring: "X-Pack monitoring"
|
||||
monitor-features: "monitoring features"
|
||||
stack-monitor-features: "Elastic Stack monitoring features"
|
||||
watcher: "Watcher"
|
||||
alert-features: "alerting features"
|
||||
reporting: "X-Pack reporting"
|
||||
report-features: "reporting features"
|
||||
graph: "X-Pack graph"
|
||||
graph-features: "graph analytics features"
|
||||
searchprofiler: "Search Profiler"
|
||||
xpackml: "X-Pack machine learning"
|
||||
ml: "machine learning"
|
||||
ml-cap: "Machine learning"
|
||||
ml-init: "ML"
|
||||
ml-features: "machine learning features"
|
||||
stack-ml-features: "Elastic Stack machine learning features"
|
||||
ccr: "cross-cluster replication"
|
||||
ccr-cap: "Cross-cluster replication"
|
||||
ccr-init: "CCR"
|
||||
ccs: "cross-cluster search"
|
||||
ccs-cap: "Cross-cluster search"
|
||||
ccs-init: "CCS"
|
||||
ilm: "index lifecycle management"
|
||||
ilm-cap: "Index lifecycle management"
|
||||
ilm-init: "ILM"
|
||||
dlm: "data lifecycle management"
|
||||
dlm-cap: "Data lifecycle management"
|
||||
dlm-init: "DLM"
|
||||
search-snap: "searchable snapshot"
|
||||
search-snaps: "searchable snapshots"
|
||||
search-snaps-cap: "Searchable snapshots"
|
||||
slm: "snapshot lifecycle management"
|
||||
slm-cap: "Snapshot lifecycle management"
|
||||
slm-init: "SLM"
|
||||
rollup-features: "data rollup features"
|
||||
ipm: "index pattern management"
|
||||
ipm-cap: "Index pattern"
|
||||
rollup: "rollup"
|
||||
rollup-cap: "Rollup"
|
||||
rollups: "rollups"
|
||||
rollups-cap: "Rollups"
|
||||
rollup-job: "rollup job"
|
||||
rollup-jobs: "rollup jobs"
|
||||
rollup-jobs-cap: "Rollup jobs"
|
||||
dfeed: "datafeed"
|
||||
dfeeds: "datafeeds"
|
||||
dfeed-cap: "Datafeed"
|
||||
dfeeds-cap: "Datafeeds"
|
||||
ml-jobs: "machine learning jobs"
|
||||
ml-jobs-cap: "Machine learning jobs"
|
||||
anomaly-detect: "anomaly detection"
|
||||
anomaly-detect-cap: "Anomaly detection"
|
||||
anomaly-job: "anomaly detection job"
|
||||
anomaly-jobs: "anomaly detection jobs"
|
||||
anomaly-jobs-cap: "Anomaly detection jobs"
|
||||
dataframe: "data frame"
|
||||
dataframes: "data frames"
|
||||
dataframe-cap: "Data frame"
|
||||
dataframes-cap: "Data frames"
|
||||
watcher-transform: "payload transform"
|
||||
watcher-transforms: "payload transforms"
|
||||
watcher-transform-cap: "Payload transform"
|
||||
watcher-transforms-cap: "Payload transforms"
|
||||
transform: "transform"
|
||||
transforms: "transforms"
|
||||
transform-cap: "Transform"
|
||||
transforms-cap: "Transforms"
|
||||
dataframe-transform: "transform"
|
||||
dataframe-transform-cap: "Transform"
|
||||
dataframe-transforms: "transforms"
|
||||
dataframe-transforms-cap: "Transforms"
|
||||
dfanalytics-cap: "Data frame analytics"
|
||||
dfanalytics: "data frame analytics"
|
||||
dataframe-analytics-config: "'{dataframe} analytics config'"
|
||||
dfanalytics-job: "'{dataframe} analytics job'"
|
||||
dfanalytics-jobs: "'{dataframe} analytics jobs'"
|
||||
dfanalytics-jobs-cap: "'{dataframe-cap} analytics jobs'"
|
||||
cdataframe: "continuous data frame"
|
||||
cdataframes: "continuous data frames"
|
||||
cdataframe-cap: "Continuous data frame"
|
||||
cdataframes-cap: "Continuous data frames"
|
||||
cdataframe-transform: "continuous transform"
|
||||
cdataframe-transforms: "continuous transforms"
|
||||
cdataframe-transforms-cap: "Continuous transforms"
|
||||
ctransform: "continuous transform"
|
||||
ctransform-cap: "Continuous transform"
|
||||
ctransforms: "continuous transforms"
|
||||
ctransforms-cap: "Continuous transforms"
|
||||
oldetection: "outlier detection"
|
||||
oldetection-cap: "Outlier detection"
|
||||
olscore: "outlier score"
|
||||
olscores: "outlier scores"
|
||||
fiscore: "feature influence score"
|
||||
evaluatedf-api: "evaluate {dataframe} analytics API"
|
||||
evaluatedf-api-cap: "Evaluate {dataframe} analytics API"
|
||||
binarysc: "binary soft classification"
|
||||
binarysc-cap: "Binary soft classification"
|
||||
regression: "regression"
|
||||
regression-cap: "Regression"
|
||||
reganalysis: "regression analysis"
|
||||
reganalysis-cap: "Regression analysis"
|
||||
depvar: "dependent variable"
|
||||
feature-var: "feature variable"
|
||||
feature-vars: "feature variables"
|
||||
feature-vars-cap: "Feature variables"
|
||||
classification: "classification"
|
||||
classification-cap: "Classification"
|
||||
classanalysis: "classification analysis"
|
||||
classanalysis-cap: "Classification analysis"
|
||||
infer-cap: "Inference"
|
||||
infer: "inference"
|
||||
lang-ident-cap: "Language identification"
|
||||
lang-ident: "language identification"
|
||||
data-viz: "Data Visualizer"
|
||||
file-data-viz: "File Data Visualizer"
|
||||
feat-imp: "feature importance"
|
||||
feat-imp-cap: "Feature importance"
|
||||
nlp: "natural language processing"
|
||||
nlp-cap: "Natural language processing"
|
||||
apm-agent: "APM agent"
|
||||
apm-go-agent: "Elastic APM Go agent"
|
||||
apm-go-agents: "Elastic APM Go agents"
|
||||
apm-ios-agent: "Elastic APM iOS agent"
|
||||
apm-ios-agents: "Elastic APM iOS agents"
|
||||
apm-java-agent: "Elastic APM Java agent"
|
||||
apm-java-agents: "Elastic APM Java agents"
|
||||
apm-dotnet-agent: "Elastic APM .NET agent"
|
||||
apm-dotnet-agents: "Elastic APM .NET agents"
|
||||
apm-node-agent: "Elastic APM Node.js agent"
|
||||
apm-node-agents: "Elastic APM Node.js agents"
|
||||
apm-php-agent: "Elastic APM PHP agent"
|
||||
apm-php-agents: "Elastic APM PHP agents"
|
||||
apm-py-agent: "Elastic APM Python agent"
|
||||
apm-py-agents: "Elastic APM Python agents"
|
||||
apm-ruby-agent: "Elastic APM Ruby agent"
|
||||
apm-ruby-agents: "Elastic APM Ruby agents"
|
||||
apm-rum-agent: "Elastic APM Real User Monitoring (RUM) JavaScript agent"
|
||||
apm-rum-agents: "Elastic APM RUM JavaScript agents"
|
||||
apm-lambda-ext: "Elastic APM AWS Lambda extension"
|
||||
project-monitors: "project monitors"
|
||||
project-monitors-cap: "Project monitors"
|
||||
private-location: "Private Location"
|
||||
private-locations: "Private Locations"
|
||||
pwd: "YOUR_PASSWORD"
|
||||
esh: "ES-Hadoop"
|
||||
default-dist: "default distribution"
|
||||
oss-dist: "OSS-only distribution"
|
||||
observability: "Observability"
|
||||
api-request-title: "Request"
|
||||
api-prereq-title: "Prerequisites"
|
||||
api-description-title: "Description"
|
||||
api-path-parms-title: "Path parameters"
|
||||
api-query-parms-title: "Query parameters"
|
||||
api-request-body-title: "Request body"
|
||||
api-response-codes-title: "Response codes"
|
||||
api-response-body-title: "Response body"
|
||||
api-example-title: "Example"
|
||||
api-examples-title: "Examples"
|
||||
api-definitions-title: "Properties"
|
||||
multi-arg: "†footnoteref:[multi-arg,This parameter accepts multiple arguments.]"
|
||||
multi-arg-ref: "†footnoteref:[multi-arg]"
|
||||
yes-icon: "image:https://doc-icons.s3.us-east-2.amazonaws.com/icon-yes.png[Yes,20,15]"
|
||||
no-icon: "image:https://doc-icons.s3.us-east-2.amazonaws.com/icon-no.png[No,20,15]"
|
||||
es-repo: "https://github.com/elastic/elasticsearch/"
|
||||
es-issue: "https://github.com/elastic/elasticsearch/issues/"
|
||||
es-pull: "https://github.com/elastic/elasticsearch/pull/"
|
||||
es-commit: "https://github.com/elastic/elasticsearch/commit/"
|
||||
kib-repo: "https://github.com/elastic/kibana/"
|
||||
kib-issue: "https://github.com/elastic/kibana/issues/"
|
||||
kibana-issue: "'{kib-repo}issues/'"
|
||||
kib-pull: "https://github.com/elastic/kibana/pull/"
|
||||
kibana-pull: "'{kib-repo}pull/'"
|
||||
kib-commit: "https://github.com/elastic/kibana/commit/"
|
||||
ml-repo: "https://github.com/elastic/ml-cpp/"
|
||||
ml-issue: "https://github.com/elastic/ml-cpp/issues/"
|
||||
ml-pull: "https://github.com/elastic/ml-cpp/pull/"
|
||||
ml-commit: "https://github.com/elastic/ml-cpp/commit/"
|
||||
apm-repo: "https://github.com/elastic/apm-server/"
|
||||
apm-issue: "https://github.com/elastic/apm-server/issues/"
|
||||
apm-pull: "https://github.com/elastic/apm-server/pull/"
|
||||
kibana-blob: "https://github.com/elastic/kibana/blob/current/"
|
||||
apm-get-started-ref: "https://www.elastic.co/guide/en/apm/get-started/current"
|
||||
apm-server-ref: "https://www.elastic.co/guide/en/apm/server/current"
|
||||
apm-server-ref-v: "https://www.elastic.co/guide/en/apm/server/current"
|
||||
apm-server-ref-m: "https://www.elastic.co/guide/en/apm/server/master"
|
||||
apm-server-ref-62: "https://www.elastic.co/guide/en/apm/server/6.2"
|
||||
apm-server-ref-64: "https://www.elastic.co/guide/en/apm/server/6.4"
|
||||
apm-server-ref-70: "https://www.elastic.co/guide/en/apm/server/7.0"
|
||||
apm-overview-ref-v: "https://www.elastic.co/guide/en/apm/get-started/current"
|
||||
apm-overview-ref-70: "https://www.elastic.co/guide/en/apm/get-started/7.0"
|
||||
apm-overview-ref-m: "https://www.elastic.co/guide/en/apm/get-started/master"
|
||||
infra-guide: "https://www.elastic.co/guide/en/infrastructure/guide/current"
|
||||
a-data-source: "a data view"
|
||||
icon-bug: "pass:[<span class=\"eui-icon icon-bug\"></span>]"
|
||||
icon-checkInCircleFilled: "pass:[<span class=\"eui-icon icon-checkInCircleFilled\"></span>]"
|
||||
icon-warningFilled: "pass:[<span class=\"eui-icon icon-warningFilled\"></span>]"
|
||||
@ -1,34 +0,0 @@
|
||||
[[examples]]
|
||||
== Examples
|
||||
|
||||
Below you can find some examples on how to use the client.
|
||||
|
||||
* Use of the <<as_stream_examples,asStream>> parameter;
|
||||
* Executing a <<bulk_examples,bulk>> request;
|
||||
* Executing a <<exists_examples,exists>> request;
|
||||
* Executing a <<get_examples,get>> request;
|
||||
* Executing a <<sql_query_examples,sql.query>> request;
|
||||
* Executing a <<update_examples,update>> request;
|
||||
* Executing a <<update_by_query_examples,update by query>> request;
|
||||
* Executing a <<reindex_examples,reindex>> request;
|
||||
* Use of the <<ignore_examples,ignore>> parameter;
|
||||
* Executing a <<msearch_examples,msearch>> request;
|
||||
* How do I <<scroll_examples,scroll>>?
|
||||
* Executing a <<search_examples,search>> request;
|
||||
* I need <<suggest_examples,suggestions>>;
|
||||
* How to use the <<transport_request_examples,transport.request>> method;
|
||||
|
||||
include::asStream.asciidoc[]
|
||||
include::bulk.asciidoc[]
|
||||
include::exists.asciidoc[]
|
||||
include::get.asciidoc[]
|
||||
include::ignore.asciidoc[]
|
||||
include::msearch.asciidoc[]
|
||||
include::scroll.asciidoc[]
|
||||
include::search.asciidoc[]
|
||||
include::suggest.asciidoc[]
|
||||
include::transport.request.asciidoc[]
|
||||
include::sql.query.asciidoc[]
|
||||
include::update.asciidoc[]
|
||||
include::update_by_query.asciidoc[]
|
||||
include::reindex.asciidoc[]
|
||||
@ -1,170 +0,0 @@
|
||||
[[getting-started-js]]
|
||||
== Getting started
|
||||
|
||||
This page guides you through the installation process of the Node.js client,
|
||||
shows you how to instantiate the client, and how to perform basic Elasticsearch
|
||||
operations with it.
|
||||
|
||||
[discrete]
|
||||
=== Requirements
|
||||
|
||||
* https://nodejs.org/[Node.js] version 14.x or newer
|
||||
* https://docs.npmjs.com/downloading-and-installing-node-js-and-npm[`npm`], usually bundled with Node.js
|
||||
|
||||
[discrete]
|
||||
=== Installation
|
||||
|
||||
To install the latest version of the client, run the following command:
|
||||
|
||||
[source,shell]
|
||||
--------------------------
|
||||
npm install @elastic/elasticsearch
|
||||
--------------------------
|
||||
|
||||
Refer to the <<installation>> page to learn more.
|
||||
|
||||
|
||||
[discrete]
|
||||
=== Connecting
|
||||
|
||||
You can connect to Elastic Cloud using an API key and the Elasticsearch
|
||||
endpoint.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://...', // Elasticsearch endpoint
|
||||
auth: {
|
||||
apiKey: { // API key ID and secret
|
||||
id: 'foo',
|
||||
api_key: 'bar',
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
Your Elasticsearch endpoint can be found on the **My deployment** page of your
|
||||
deployment:
|
||||
|
||||
image::images/es-endpoint.jpg[alt="Finding Elasticsearch endpoint",align="center"]
|
||||
|
||||
You can generate an API key on the **Management** page under Security.
|
||||
|
||||
image::images/create-api-key.png[alt="Create API key",align="center"]
|
||||
|
||||
For other connection options, refer to the <<client-connecting>> section.
|
||||
|
||||
|
||||
[discrete]
|
||||
=== Operations
|
||||
|
||||
Time to use Elasticsearch! This section walks you through the basic, and most
|
||||
important, operations of Elasticsearch.
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Creating an index
|
||||
|
||||
This is how you create the `my_index` index:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
await client.indices.create({ index: 'my_index' })
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Indexing documents
|
||||
|
||||
This is a simple way of indexing a document:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
await client.index({
|
||||
index: 'my_index',
|
||||
id: 'my_document_id',
|
||||
document: {
|
||||
foo: 'foo',
|
||||
bar: 'bar',
|
||||
},
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Getting documents
|
||||
|
||||
You can get documents by using the following code:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
await client.get({
|
||||
index: 'my_index',
|
||||
id: 'my_document_id',
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Searching documents
|
||||
|
||||
This is how you can create a single match query with the client:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
await client.search({
|
||||
query: {
|
||||
match: {
|
||||
foo: 'foo'
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Updating documents
|
||||
|
||||
This is how you can update a document, for example to add a new field:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
await client.update({
|
||||
index: 'my_index',
|
||||
id: 'my_document_id',
|
||||
doc: {
|
||||
foo: 'bar',
|
||||
new_field: 'new value'
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Deleting documents
|
||||
|
||||
[source,js]
|
||||
----
|
||||
await client.delete({
|
||||
index: 'my_index',
|
||||
id: 'my_document_id',
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Deleting an index
|
||||
|
||||
[source,js]
|
||||
----
|
||||
await client.indices.delete({ index: 'my_index' })
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
== Further reading
|
||||
|
||||
* Use <<client-helpers>> for a more comfortable experience with the APIs.
|
||||
* For an elaborate example of how to ingest data into Elastic Cloud,
|
||||
refer to {cloud}/ec-getting-started-node-js.html[this page].
|
||||
@ -1,748 +0,0 @@
|
||||
[[client-helpers]]
|
||||
== Client helpers
|
||||
|
||||
The client comes with a handy collection of helpers to give you a more
comfortable experience with some APIs.
|
||||
|
||||
CAUTION: The client helpers are experimental, and the API may change in the next
|
||||
minor releases. The helpers will not work in any Node.js version lower than 10.
|
||||
|
||||
|
||||
[discrete]
|
||||
[[bulk-helper]]
|
||||
=== Bulk helper
|
||||
|
||||
~Added~ ~in~ ~`v7.7.0`~
|
||||
|
||||
Running bulk requests can be complex due to the shape of the API; this helper
aims to provide a nicer developer experience around the Bulk API.
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Usage
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { createReadStream } = require('fs')
|
||||
const split = require('split2')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: createReadStream('./dataset.ndjson').pipe(split()),
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log(result)
|
||||
// {
|
||||
// total: number,
|
||||
// failed: number,
|
||||
// retry: number,
|
||||
// successful: number,
|
||||
// time: number,
|
||||
// bytes: number,
|
||||
// aborted: boolean
|
||||
// }
|
||||
----
|
||||
|
||||
To create a new instance of the Bulk helper, access it as shown in the example
above. The configuration options are:
|
||||
[cols=2*]
|
||||
|===
|
||||
|`datasource`
|
||||
a|An array, async generator or a readable stream with the data you need to index/create/update/delete.
|
||||
It can be an array of strings or objects, but also a stream of JSON strings or JavaScript objects. +
If it is a stream, we recommend using the https://www.npmjs.com/package/split2[`split2`] package, which splits the stream on new line delimiters. +
|
||||
This parameter is mandatory.
|
||||
[source,js]
|
||||
----
|
||||
const { createReadStream } = require('fs')
|
||||
const split = require('split2')
|
||||
const b = client.helpers.bulk({
|
||||
// if you just use split(), the data will be used as an array of strings
|
||||
datasource: createReadStream('./dataset.ndjson').pipe(split())
|
||||
// if you need to manipulate the data, you can pass JSON.parse to split
|
||||
datasource: createReadStream('./dataset.ndjson').pipe(split(JSON.parse))
|
||||
})
|
||||
----
|
||||
|
||||
|`onDocument`
|
||||
a|A function that is called for each document of the datasource. Inside this function you can manipulate the document and you must return the operation you want to execute with the document. Look at the link:{ref}/docs-bulk.html[Bulk API documentation] to see the supported operations. +
|
||||
This parameter is mandatory.
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|`onDrop`
|
||||
a|A function that is called every time a document can't be indexed and has reached the maximum number of retries.
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
onDrop (doc) {
|
||||
console.log(doc)
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|`onSuccess`
|
||||
a|A function that is called for each successful operation in the bulk request, which includes the result from Elasticsearch along with the original document that was sent, or `null` for delete operations.
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
onSuccess ({ result, document }) {
|
||||
console.log(`SUCCESS: Document ${result.index._id} indexed to ${result.index._index}`)
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|`flushBytes`
|
||||
a|The size of the bulk body in bytes to reach before sending it. Defaults to 5MB. +
|
||||
_Default:_ `5000000`
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
flushBytes: 1000000
|
||||
})
|
||||
----
|
||||
|
||||
|`flushInterval`
|
||||
a|How much time (in milliseconds) the helper waits before flushing the body from the last document read. +
|
||||
_Default:_ `30000`
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
flushInterval: 30000
|
||||
})
|
||||
----
|
||||
|
||||
|`concurrency`
|
||||
a|How many requests are executed at the same time. +
|
||||
_Default:_ `5`
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
concurrency: 10
|
||||
})
|
||||
----
|
||||
|
||||
|`retries`
|
||||
a|How many times a document is retried before the `onDrop` callback is called. +
|
||||
_Default:_ Client max retries.
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
retries: 3
|
||||
})
|
||||
----
|
||||
|
||||
|`wait`
|
||||
a|How long to wait between retries, in milliseconds. +
_Default:_ `5000`
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
wait: 3000
|
||||
})
|
||||
----
|
||||
|
||||
|`refreshOnCompletion`
|
||||
a|If `true`, at the end of the bulk operation it runs a refresh on all indices or on the specified indices. +
|
||||
_Default:_ false.
|
||||
[source,js]
|
||||
----
|
||||
const b = client.helpers.bulk({
|
||||
refreshOnCompletion: true
|
||||
// or
|
||||
refreshOnCompletion: 'index-name'
|
||||
})
|
||||
----
|
||||
|
||||
|===
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Supported operations
|
||||
|
||||
|
||||
[discrete]
|
||||
===== Index
|
||||
|
||||
[source,js]
|
||||
----
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
===== Create
|
||||
|
||||
[source,js]
|
||||
----
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
create: { _index: 'my-index', _id: doc.id }
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
===== Update
|
||||
|
||||
[source,js]
|
||||
----
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
// Note that the update operation requires you to return
// an array, where the first element is the action and
// the second is the document options
|
||||
return [
|
||||
{ update: { _index: 'my-index', _id: doc.id } },
|
||||
{ doc_as_upsert: true }
|
||||
]
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
===== Delete
|
||||
|
||||
[source,js]
|
||||
----
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
delete: { _index: 'my-index', _id: doc.id }
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Abort a bulk operation
|
||||
|
||||
If needed, you can abort a bulk operation at any time. The bulk helper returns a
|
||||
https://promisesaplus.com/[thenable], which has an `abort` method.
|
||||
|
||||
NOTE: The abort method stops the execution of the bulk operation, but if you
|
||||
are using a concurrency higher than one, the operations that are already running
|
||||
will not be stopped.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { createReadStream } = require('fs')
|
||||
const split = require('split2')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const b = client.helpers.bulk({
|
||||
datasource: createReadStream('./dataset.ndjson').pipe(split()),
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
b.abort()
|
||||
}
|
||||
})
|
||||
|
||||
console.log(await b)
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Passing custom options to the Bulk API
|
||||
|
||||
You can pass any option supported by the
{ref}/docs-bulk.html#docs-bulk-api-query-params[Bulk API] to the helper, and the
helper uses those options in conjunction with the Bulk API call.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: [...],
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
},
|
||||
pipeline: 'my-pipeline'
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Usage with an async generator
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
async function * generator () {
|
||||
const dataset = [
|
||||
{ user: 'jon', age: 23 },
|
||||
{ user: 'arya', age: 18 },
|
||||
{ user: 'tyrion', age: 39 }
|
||||
]
|
||||
for (const doc of dataset) {
|
||||
yield doc
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: generator(),
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log(result)
|
||||
----
|
||||
|
||||
[discrete]
|
||||
==== Modifying a document before operation
|
||||
|
||||
~Added~ ~in~ ~`v8.8.2`~
|
||||
|
||||
If you need to modify documents in your datasource before it is sent to Elasticsearch, you can return an array in the `onDocument` function rather than an operation object. The first item in the array must be the operation object, and the second item must be the document or partial document object as you'd like it to be sent to Elasticsearch.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: [...],
|
||||
onDocument (doc) {
|
||||
return [
|
||||
{ index: { _index: 'my-index' } },
|
||||
{ ...doc, favorite_color: 'mauve' },
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
console.log(result)
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[multi-search-helper]]
|
||||
=== Multi search helper
|
||||
|
||||
~Added~ ~in~ ~`v7.8.0`~
|
||||
|
||||
If you send search requests at a high rate, this helper might be useful
for you. It uses the multi search API under the hood to batch the requests
and improve the overall performance of your application. The `result` also
exposes a `documents` property, which allows you to access the hit sources
directly.
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Usage
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'javascript' } } }
|
||||
)
|
||||
.then(result => console.log(result.body)) // or result.documents
|
||||
.catch(err => console.error(err))
|
||||
----
|
||||
|
||||
To create a new instance of the multi search (msearch) helper, you should access
it as shown in the example above. The configuration options are:
|
||||
[cols=2*]
|
||||
|===
|
||||
|`operations`
|
||||
a|How many search operations should be sent in a single msearch request. +
|
||||
_Default:_ `5`
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
operations: 10
|
||||
})
|
||||
----
|
||||
|
||||
|`flushInterval`
|
||||
a|How long (in milliseconds) the helper waits after reading the last operation before flushing the pending operations. +
|
||||
_Default:_ `500`
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
flushInterval: 500
|
||||
})
|
||||
----
|
||||
|
||||
|`concurrency`
|
||||
a|How many requests are executed at the same time. +
|
||||
_Default:_ `5`
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
concurrency: 10
|
||||
})
|
||||
----
|
||||
|
||||
|`retries`
|
||||
a|How many times an operation is retried before the request is resolved. An operation is retried only in case of a 429 error. +
|
||||
_Default:_ Client max retries.
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
retries: 3
|
||||
})
|
||||
----
|
||||
|
||||
|`wait`
|
||||
a|How long to wait between retries, in milliseconds. +
_Default:_ `5000`
|
||||
[source,js]
|
||||
----
|
||||
const m = client.helpers.msearch({
|
||||
wait: 3000
|
||||
})
|
||||
----
|
||||
|
||||
|===
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Stopping the msearch helper
|
||||
|
||||
If needed, you can stop an msearch processor at any time. The msearch helper
returns a https://promisesaplus.com/[thenable], which has a `stop` method.

If you are creating multiple msearch helper instances and using them for a
limited period of time, remember to always call the `stop` method once you have
finished using them, otherwise your application will start leaking memory.

The `stop` method accepts an optional error that will be dispatched to every
subsequent search request.
|
||||
|
||||
NOTE: The stop method stops the execution of the msearch processor, but if
|
||||
you are using a concurrency higher than one, the operations that are already
|
||||
running will not be stopped.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'javascript' } } }
|
||||
)
|
||||
.then(result => console.log(result.body))
|
||||
.catch(err => console.error(err))
|
||||
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'ruby' } } }
|
||||
)
|
||||
.then(result => console.log(result.body))
|
||||
.catch(err => console.error(err))
|
||||
|
||||
setImmediate(() => m.stop())
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[search-helper]]
|
||||
=== Search helper
|
||||
|
||||
~Added~ ~in~ ~`v7.7.0`~
|
||||
|
||||
A simple wrapper around the search API. Instead of returning the entire `result`
object, it returns only the document sources. To improve performance, this
helper automatically adds `filter_path=hits.hits._source` to the query string.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const documents = await client.helpers.search({
|
||||
index: 'stackoverflow',
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for (const doc of documents) {
|
||||
console.log(doc)
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[scroll-search-helper]]
|
||||
=== Scroll search helper
|
||||
|
||||
~Added~ ~in~ ~`v7.7.0`~
|
||||
|
||||
This helper offers a simple and intuitive way to use the scroll search API.
Once called, it returns an
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function[async iterator]
which can be used in conjunction with a `for await...of` loop. It automatically
handles the `429` error and uses the `maxRetries` option of the client.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: 'stackoverflow',
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for await (const result of scrollSearch) {
|
||||
console.log(result)
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Clear a scroll search
|
||||
|
||||
If needed, you can clear a scroll search by calling `result.clear()`:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
for await (const result of scrollSearch) {
|
||||
if (condition) {
|
||||
await result.clear()
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Quickly getting the documents
|
||||
|
||||
If you only need the documents from the result of a scroll search, you can
|
||||
access them via `result.documents`:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
for await (const result of scrollSearch) {
|
||||
console.log(result.documents)
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
[discrete]
|
||||
[[scroll-documents-helper]]
|
||||
=== Scroll documents helper
|
||||
|
||||
~Added~ ~in~ ~`v7.7.0`~
|
||||
|
||||
It works in the same way as the scroll search helper, but it returns only the
documents instead. Note that every loop cycle returns a single document, and you
can't use the `clear` method. To improve performance, this helper
automatically adds `filter_path=hits.hits._source` to the query string.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const scrollSearch = client.helpers.scrollDocuments({
|
||||
index: 'stackoverflow',
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for await (const doc of scrollSearch) {
|
||||
console.log(doc)
|
||||
}
|
||||
----
|
||||
|
||||
[discrete]
|
||||
[[esql-helper]]
|
||||
=== ES|QL helper
|
||||
|
||||
ES|QL queries can return their results in {ref}/esql-rest.html#esql-rest-format[several formats].
The helpers described below make it easier to consume those responses.
|
||||
|
||||
[discrete]
|
||||
==== Usage
|
||||
|
||||
[discrete]
|
||||
===== `toRecords`
|
||||
|
||||
~Added~ ~in~ ~`v8.14.0`~
|
||||
|
||||
The default JSON format returned by ES|QL queries contains arrays of values
|
||||
for each row, with column names and types returned separately:
|
||||
|
||||
[source,json]
|
||||
----
|
||||
{
|
||||
"columns": [
|
||||
{ "name": "@timestamp", "type": "date" },
|
||||
{ "name": "client_ip", "type": "ip" },
|
||||
{ "name": "event_duration", "type": "long" },
|
||||
{ "name": "message", "type": "keyword" }
|
||||
],
|
||||
"values": [
|
||||
[
|
||||
"2023-10-23T12:15:03.360Z",
|
||||
"172.21.2.162",
|
||||
3450233,
|
||||
"Connected to 10.1.0.3"
|
||||
],
|
||||
[
|
||||
"2023-10-23T12:27:28.948Z",
|
||||
"172.21.2.113",
|
||||
2764889,
|
||||
"Connected to 10.1.0.2"
|
||||
]
|
||||
]
|
||||
}
|
||||
----
|
||||
|
||||
In many cases, it's preferable to operate on an array of objects, one object per row,
|
||||
rather than an array of arrays. The ES|QL `toRecords` helper converts row data into objects.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
await client.helpers
|
||||
.esql({ query: 'FROM sample_data | LIMIT 2' })
|
||||
.toRecords()
|
||||
// =>
|
||||
// {
|
||||
// "columns": [
|
||||
// { "name": "@timestamp", "type": "date" },
|
||||
// { "name": "client_ip", "type": "ip" },
|
||||
// { "name": "event_duration", "type": "long" },
|
||||
// { "name": "message", "type": "keyword" }
|
||||
// ],
|
||||
// "records": [
|
||||
// {
|
||||
// "@timestamp": "2023-10-23T12:15:03.360Z",
|
||||
// "client_ip": "172.21.2.162",
|
||||
// "event_duration": 3450233,
|
||||
// "message": "Connected to 10.1.0.3"
|
||||
// },
|
||||
// {
|
||||
// "@timestamp": "2023-10-23T12:27:28.948Z",
|
||||
// "client_ip": "172.21.2.113",
|
||||
// "event_duration": 2764889,
|
||||
// "message": "Connected to 10.1.0.2"
|
||||
// },
|
||||
// ]
|
||||
// }
|
||||
----
|
||||
|
||||
In TypeScript, you can declare the type that `toRecords` returns:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
type EventLog = {
|
||||
'@timestamp': string,
|
||||
client_ip: string,
|
||||
event_duration: number,
|
||||
message: string,
|
||||
}
|
||||
|
||||
const result = await client.helpers
|
||||
.esql({ query: 'FROM sample_data | LIMIT 2' })
|
||||
.toRecords<EventLog>()
|
||||
----
|
||||
|
||||
[discrete]
|
||||
===== `toArrowReader`
|
||||
|
||||
~Added~ ~in~ ~`v8.16.0`~
|
||||
|
||||
ES|QL can return results in multiple binary formats, including https://arrow.apache.org/[Apache Arrow]'s streaming format. Because it is a very efficient format to read, it can be valuable for performing high-performance in-memory analytics. And, because the response is streamed as batches of records, it can be used to produce aggregations and other calculations on larger-than-memory data sets.
|
||||
|
||||
`toArrowReader` returns a https://arrow.apache.org/docs/js/classes/Arrow_dom.RecordBatchReader.html[`RecordBatchStreamReader`].
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
const reader = await client.helpers
|
||||
.esql({ query: 'FROM sample_data' })
|
||||
.toArrowReader()
|
||||
|
||||
// print each record as JSON
|
||||
for (const recordBatch of reader) {
|
||||
for (const record of recordBatch) {
|
||||
console.log(record.toJSON())
|
||||
}
|
||||
}
|
||||
----
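
Because the results arrive in batches, you can also compute aggregates without
materializing the whole result set. The sketch below assumes the `sample_data`
index from the examples above; `long` columns may be decoded as `BigInt`, hence
the `Number()` conversion.

[source,ts]
----
const reader = await client.helpers
  .esql({ query: 'FROM sample_data' })
  .toArrowReader()

// compute the average event_duration batch by batch
let total = 0
let count = 0
for (const recordBatch of reader) {
  for (const record of recordBatch) {
    const row = record.toJSON()
    total += Number(row.event_duration)
    count++
  }
}
console.log(`average event_duration: ${total / count}`)
----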
|
||||
|
||||
[discrete]
|
||||
===== `toArrowTable`
|
||||
|
||||
~Added~ ~in~ ~`v8.16.0`~
|
||||
|
||||
If you would like to pull the entire data set in Arrow format but without streaming, you can use the `toArrowTable` helper to get a https://arrow.apache.org/docs/js/classes/Arrow_dom.Table.html[Table] back instead.
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
const table = await client.helpers
|
||||
.esql({ query: 'FROM sample_data' })
|
||||
.toArrowTable()
|
||||
|
||||
console.log(table.toArray())
|
||||
----
|
||||
@ -1,24 +0,0 @@
|
||||
= Elasticsearch JavaScript Client
|
||||
|
||||
include::{asciidoc-dir}/../../shared/versions/stack/{source_branch}.asciidoc[]
|
||||
include::{asciidoc-dir}/../../shared/attributes.asciidoc[]
|
||||
|
||||
include::introduction.asciidoc[]
|
||||
include::getting-started.asciidoc[]
|
||||
include::changelog.asciidoc[]
|
||||
include::installation.asciidoc[]
|
||||
include::connecting.asciidoc[]
|
||||
include::configuration.asciidoc[]
|
||||
include::basic-config.asciidoc[]
|
||||
include::advanced-config.asciidoc[]
|
||||
include::child.asciidoc[]
|
||||
include::testing.asciidoc[]
|
||||
include::integrations.asciidoc[]
|
||||
include::observability.asciidoc[]
|
||||
include::transport.asciidoc[]
|
||||
include::typescript.asciidoc[]
|
||||
include::reference.asciidoc[]
|
||||
include::examples/index.asciidoc[]
|
||||
include::helpers.asciidoc[]
|
||||
include::redirects.asciidoc[]
|
||||
include::timeout-best-practices.asciidoc[]
|
||||
@ -1,116 +0,0 @@
|
||||
[[installation]]
|
||||
== Installation
|
||||
|
||||
This page guides you through the installation process of the client.
|
||||
|
||||
To install the latest version of the client, run the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install @elastic/elasticsearch
|
||||
----
|
||||
|
||||
To install a specific major version of the client, run the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install @elastic/elasticsearch@<major>
|
||||
----
|
||||
|
||||
To learn more about the supported major versions, please refer to the
|
||||
<<js-compatibility-matrix>>.
|
||||
|
||||
[discrete]
|
||||
[[nodejs-support]]
|
||||
=== Node.js support
|
||||
|
||||
NOTE: The minimum supported version of Node.js is `v18`.
|
||||
|
||||
The client versioning follows the {stack} versioning; this means that
major, minor, and patch releases are done following a precise schedule that
often does not coincide with the https://nodejs.org/en/about/releases/[Node.js release] times.
|
||||
|
||||
To avoid supporting insecure and unsupported versions of Node.js, the
client *will drop support for EOL versions of Node.js between minor releases*.
Typically, as soon as a Node.js version reaches EOL, the client will continue
to support that version for at least another minor release. If you are using the client
with a version of Node.js that will be unsupported soon, you will see a warning
in your logs (the client starts logging the warning two minor releases in advance).
|
||||
|
||||
Unless you are *always* using a supported version of Node.js,
we recommend defining the client dependency in your
`package.json` with `~` instead of `^`. In this way, you lock the
dependency to the minor release rather than the major (for example, `~7.10.0` instead
of `^7.10.0`).
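
For example, a `package.json` entry locked to a minor release looks like this
(the version shown is only illustrative):

[source,json]
----
{
  "dependencies": {
    "@elastic/elasticsearch": "~8.9.0"
  }
}
----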
|
||||
|
||||
[%header,cols=3*]
|
||||
|===
|
||||
|Node.js Version
|
||||
|Node.js EOL date
|
||||
|End of support
|
||||
|
||||
|`8.x`
|
||||
|December 2019
|
||||
|`7.11` (early 2021)
|
||||
|
||||
|`10.x`
|
||||
|April 2021
|
||||
|`7.12` (mid 2021)
|
||||
|
||||
|`12.x`
|
||||
|April 2022
|
||||
|`8.2` (early 2022)
|
||||
|
||||
|`14.x`
|
||||
|April 2023
|
||||
|`8.8` (early 2023)
|
||||
|
||||
|`16.x`
|
||||
|September 2023
|
||||
|`8.11` (late 2023)
|
||||
|===
|
||||
|
||||
[discrete]
|
||||
[[js-compatibility-matrix]]
|
||||
=== Compatibility matrix
|
||||
|
||||
Language clients are forward compatible, meaning that clients support
communicating with greater or equal minor versions of {es} without breaking. It
does not mean that the clients automatically support new features of newer {es}
versions; that is only possible after a release of a new client version. For
example, an 8.12 client version won't automatically support the new features of
the 8.13 version of {es}; the 8.13 client version is required for that.
{es} language clients are only backwards compatible with default distributions
and without guarantees made.
|
||||
|
||||
[%header,cols=3*]
|
||||
|===
|
||||
|{es} Version
|
||||
|Client Version
|
||||
|Supported
|
||||
|
||||
|`8.x`
|
||||
|`8.x`
|
||||
|`8.x`
|
||||
|
||||
|`7.x`
|
||||
|`7.x`
|
||||
|`7.17`
|
||||
|
||||
|`6.x`
|
||||
|`6.x`
|
||||
|
|
||||
|
||||
|`5.x`
|
||||
|`5.x`
|
||||
|
|
||||
|===
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Browser
|
||||
|
||||
WARNING: There is no official support for the browser environment. It exposes
your {es} instance to everyone, which could lead to security issues. We
recommend writing a lightweight proxy that uses this client instead;
you can see a proxy example https://github.com/elastic/elasticsearch-js/tree/master/docs/examples/proxy[here].
|
||||
@ -1,8 +0,0 @@
|
||||
[[integrations]]
|
||||
== Integrations
|
||||
|
||||
The Client offers the following integration options for you:
|
||||
|
||||
* <<observability>>
|
||||
* <<transport>>
|
||||
* <<typescript>>
|
||||
@ -1,17 +0,0 @@
|
||||
["appendix",role="exclude",id="redirects"]
|
||||
= Deleted pages
|
||||
|
||||
The following pages have moved or been deleted.
|
||||
|
||||
[role="exclude",id="auth-reference"]
|
||||
== Authentication
|
||||
|
||||
This page has moved. See <<client-connecting>>.
|
||||
|
||||
[role="exclude",id="breaking-changes"]
|
||||
== Breaking changes
|
||||
|
||||
For information about migrating from the legacy elasticsearch.js client to the
|
||||
new Elasticsearch JavaScript client, refer to the
|
||||
https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/7.17/breaking-changes.html[7.17
|
||||
JavaScript client migration guide].
|
||||
15988 docs/reference.asciidoc (file diff suppressed because it is too large)
@ -1,25 +1,27 @@
|
||||
[[advanced-config]]
|
||||
=== Advanced configuration
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/advanced-config.html
|
||||
---
|
||||
|
||||
If you need to customize the client behavior heavily, you are in the right
|
||||
place! The client enables you to customize the following internals:
|
||||
# Advanced configuration [advanced-config]
|
||||
|
||||
If you need to customize the client behavior heavily, you are in the right place! The client enables you to customize the following internals:
|
||||
|
||||
* `ConnectionPool` class
|
||||
* `Connection` class
|
||||
* `Serializer` class
|
||||
|
||||
NOTE: For information about the `Transport` class, refer to <<transport>>.
|
||||
::::{note}
|
||||
For information about the `Transport` class, refer to [Transport](/reference/transport.md).
|
||||
::::
|
||||
|
||||
|
||||
[discrete]
|
||||
==== `ConnectionPool`
|
||||
|
||||
This class is responsible for keeping in memory all the {es} Connection that you
|
||||
are using. There is a single Connection for every node. The connection pool
|
||||
handles the resurrection strategies and the updates of the pool.
|
||||
## `ConnectionPool` [_connectionpool]
|
||||
|
||||
[source,js]
|
||||
----
|
||||
This class is responsible for keeping in memory all the {{es}} `Connection` instances that you are using. There is a single `Connection` for every node. The connection pool handles the resurrection strategies and the updates of the pool.
|
||||
|
||||
```js
|
||||
const { Client, ConnectionPool } = require('@elastic/elasticsearch')
|
||||
|
||||
class MyConnectionPool extends ConnectionPool {
|
||||
@ -34,19 +36,14 @@ const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
----
|
||||
```
|
||||
|
||||
|
||||
[discrete]
|
||||
==== `Connection`
|
||||
## `Connection` [_connection]
|
||||
|
||||
This class represents a single node, it holds every information we have on the
|
||||
node, such as roles, id, URL, custom headers and so on. The actual HTTP request
|
||||
is performed here, this means that if you want to swap the default HTTP client
|
||||
(Node.js core), you should override the `request` method of this class.
|
||||
This class represents a single node. It holds all the information we have about the node, such as roles, id, URL, custom headers and so on. The actual HTTP request is performed here; this means that if you want to swap the default HTTP client (Node.js core), you should override the `request` method of this class.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
```js
|
||||
const { Client, BaseConnection } = require('@elastic/elasticsearch')
|
||||
|
||||
class MyConnection extends BaseConnection {
|
||||
@ -60,22 +57,19 @@ const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
----
|
||||
```
|
||||
|
||||
|
||||
[discrete]
|
||||
==== `Serializer`
|
||||
## `Serializer` [_serializer]
|
||||
|
||||
This class is responsible for the serialization of every request, it offers the
|
||||
following methods:
|
||||
This class is responsible for the serialization of every request. It offers the following methods:
|
||||
|
||||
* `serialize(object: any): string;` serializes request objects.
|
||||
* `deserialize(json: string): any;` deserializes response strings.
|
||||
* `ndserialize(array: any[]): string;` serializes bulk request objects.
|
||||
* `qserialize(object: any): string;` serializes request query parameters.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
```js
|
||||
const { Client, Serializer } = require('@elastic/elasticsearch')
|
||||
|
||||
class MySerializer extends Serializer {
|
||||
@ -89,11 +83,10 @@ const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
----
|
||||
```
|
||||
|
||||
[discrete]
|
||||
[[redaction]]
|
||||
==== Redaction of potentially sensitive data
|
||||
|
||||
## Redaction of potentially sensitive data [redaction]
|
||||
|
||||
When the client raises an `Error` that originated at the HTTP layer, like a `ConnectionError` or `TimeoutError`, a `meta` object is often attached to the error object that includes metadata useful for debugging, like request and response information. Because this can include potentially sensitive data, like authentication secrets in an `Authorization` header, the client takes measures to redact common sources of sensitive data when this metadata is attached and serialized.
|
||||
|
||||
@ -101,8 +94,7 @@ If your configuration requires extra headers or other configurations that may in
|
||||
|
||||
By default, the `redaction` option is set to `{ type: 'replace' }`, which recursively searches for sensitive key names, case insensitive, and replaces their values with the string `[redacted]`.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
@ -115,12 +107,11 @@ try {
|
||||
} catch (err) {
|
||||
console.log(err.meta.meta.request.options.headers.authorization) // prints "[redacted]"
|
||||
}
|
||||
----
|
||||
```
|
||||
|
||||
If you would like to redact additional properties, you can include additional key names to search and replace:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
@ -138,12 +129,11 @@ try {
|
||||
} catch (err) {
|
||||
console.log(err.meta.meta.request.options.headers['X-My-Secret-Password']) // prints "[redacted]"
|
||||
}
|
||||
----
|
||||
```
|
||||
|
||||
Alternatively, if you know you're not going to use the metadata at all, setting the redaction type to `remove` will remove all optional sources of potentially sensitive data entirely, or replacing them with `null` for required properties.
|
||||
Alternatively, if you know you’re not going to use the metadata at all, setting the redaction type to `remove` will remove all optional sources of potentially sensitive data entirely, or replace them with `null` for required properties.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
@ -157,14 +147,16 @@ try {
|
||||
} catch (err) {
|
||||
console.log(err.meta.meta.request.options.headers) // undefined
|
||||
}
|
||||
----
|
||||
```
|
||||
|
||||
Finally, if you prefer to turn off redaction altogether, perhaps while debugging on a local developer environment, you can set the redaction type to `off`. This will revert the client to pre-8.11.0 behavior, where basic redaction is only performed during common serialization methods like `console.log` and `JSON.stringify`.
|
||||
|
||||
WARNING: Setting `redaction.type` to `off` is not recommended in production environments.
|
||||
::::{warning}
|
||||
Setting `redaction.type` to `off` is not recommended in production environments.
|
||||
::::
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
@ -178,18 +170,10 @@ try {
|
||||
} catch (err) {
|
||||
console.log(err.meta.meta.request.options.headers.authorization) // the actual header value will be logged
|
||||
}
|
||||
----
|
||||
```
|
||||
|
||||
[discrete]
|
||||
==== Migrate to v8
|
||||
|
||||
The Node.js client can be configured to emit an HTTP header
|
||||
`Accept: application/vnd.elasticsearch+json; compatible-with=7`
|
||||
which signals to Elasticsearch that the client is requesting
|
||||
`7.x` version of request and response bodies. This allows for
|
||||
upgrading from 7.x to 8.x version of Elasticsearch without upgrading
|
||||
everything at once. Elasticsearch should be upgraded first after
|
||||
the compatibility header is configured and clients should be upgraded
|
||||
second.
|
||||
To enable to setting, configure the environment variable
|
||||
`ELASTIC_CLIENT_APIVERSIONING` to `true`.
|
||||
## Migrate to v8 [_migrate_to_v8]
|
||||
|
||||
The Node.js client can be configured to emit an HTTP header `Accept: application/vnd.elasticsearch+json; compatible-with=7` which signals to Elasticsearch that the client is requesting `7.x` versions of request and response bodies. This allows for upgrading from 7.x to 8.x versions of Elasticsearch without upgrading everything at once: Elasticsearch should be upgraded first, after the compatibility header is configured, and clients should be upgraded second. To enable this setting, configure the environment variable `ELASTIC_CLIENT_APIVERSIONING` to `true`.
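
A minimal sketch of what this looks like in practice (the node URL is only an example):

```js
// Start the process with the environment variable set, for example:
//   ELASTIC_CLIENT_APIVERSIONING=true node app.js
const { Client } = require('@elastic/elasticsearch')

// With the variable set to true, every request carries the header
// Accept: application/vnd.elasticsearch+json; compatible-with=7
const client = new Client({ node: 'http://localhost:9200' })
```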
|
||||
|
||||
14377 docs/reference/api-reference.md (new file; diff suppressed because one or more lines are too long)
@ -1,11 +1,13 @@
|
||||
[[as_stream_examples]]
|
||||
=== asStream
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/as_stream_examples.html
|
||||
---
|
||||
|
||||
Instead of getting the parsed body back, you will get the raw Node.js stream of
|
||||
data.
|
||||
# asStream [as_stream_examples]
|
||||
|
||||
[source,js]
|
||||
----
|
||||
Instead of getting the parsed body back, you will get the raw Node.js stream of data.
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -66,13 +68,14 @@ async function run () {
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
```
|
||||
|
||||
TIP: This can be useful if you need to pipe the {es}'s response to a proxy, or
|
||||
send it directly to another source.
|
||||
::::{tip}
|
||||
This can be useful if you need to pipe the {{es}} response to a proxy, or send it directly to another source.
|
||||
::::
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -96,4 +99,5 @@ fastify.post('/search/:index', async (req, reply) => {
|
||||
})
|
||||
|
||||
fastify.listen(3000)
|
||||
----
|
||||
```
|
||||
|
||||
51 docs/reference/basic-config.md (new file)
@ -0,0 +1,51 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/basic-config.html
|
||||
---
|
||||
|
||||
# Basic configuration [basic-config]
|
||||
|
||||
This page shows you the possible basic configuration options that the client offers.
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
maxRetries: 5,
|
||||
sniffOnStart: true
|
||||
})
|
||||
```
|
||||
|
||||
| | |
|
||||
| --- | --- |
|
||||
| `node` or `nodes` | The Elasticsearch endpoint to use.<br> It can be a single string or an array of strings:<br><br>```js<br>node: 'http://localhost:9200'<br>```<br><br>Or it can be an object (or an array of objects) that represents the node:<br><br>```js<br>node: {<br> url: new URL('http://localhost:9200'),<br> tls: 'tls options',<br> agent: 'http agent options',<br> id: 'custom node id',<br> headers: { 'custom': 'headers' }<br> roles: {<br> master: true,<br> data: true,<br> ingest: true,<br> ml: false<br> }<br>}<br>```<br> |
|
||||
| `auth` | Your authentication data. You can use both basic authentication and [ApiKey](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-api-key).<br> See [Authentication](/reference/connecting.md#authentication) for more details.<br> *Default:* `null`<br><br>Basic authentication:<br><br>```js<br>auth: {<br> username: 'elastic',<br> password: 'changeme'<br>}<br>```<br><br>[ApiKey](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-api-key) authentication:<br><br>```js<br>auth: {<br> apiKey: 'base64EncodedKey'<br>}<br>```<br><br>Bearer authentication, useful for [service account tokens](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-service-token). Be aware that it does not handle automatic token refresh:<br><br>```js<br>auth: {<br> bearer: 'token'<br>}<br>```<br> |
|
||||
| `maxRetries` | `number` - Max number of retries for each request.<br>*Default:* `3` |
|
||||
| `requestTimeout` | `number` - Max request timeout in milliseconds for each request.<br>*Default:* No value |
|
||||
| `pingTimeout` | `number` - Max ping request timeout in milliseconds for each request.<br>*Default:* `3000` |
|
||||
| `sniffInterval` | `number, boolean` - Perform a sniff operation every `n` milliseconds. Sniffing might not be the best solution for you, take a look [here](https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how) to know more.<br>*Default:* `false` |
|
||||
| `sniffOnStart` | `boolean` - Perform a sniff once the client is started. Sniffing might not be the best solution for you, take a look [here](https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how) to know more.<br>*Default:* `false` |
|
||||
| `sniffEndpoint` | `string` - Endpoint to ping during a sniff.<br>*Default:* `'_nodes/_all/http'` |
|
||||
| `sniffOnConnectionFault` | `boolean` - Perform a sniff on connection fault. Sniffing might not be the best solution for you, take a look [here](https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how) to know more.<br>*Default:* `false` |
|
||||
| `resurrectStrategy` | `string` - Configure the node resurrection strategy.<br>*Options:* `'ping'`, `'optimistic'`, `'none'`<br>*Default:* `'ping'` |
|
||||
| `suggestCompression` | `boolean` - Adds `accept-encoding` header to every request.<br>*Default:* `false` |
|
||||
| `compression` | `string, boolean` - Enables gzip request body compression.<br>*Options:* `'gzip'`, `false`<br>*Default:* `false` |
|
||||
| `tls` | `http.SecureContextOptions` - tls [configuration](https://nodejs.org/api/tls.md).<br>*Default:* `null` |
|
||||
| `proxy` | `string, URL` - If you are using an http(s) proxy, you can put its url here. The client will automatically handle the connection to it.<br> *Default:* `null`<br><br>```js<br>const client = new Client({<br> node: 'http://localhost:9200',<br> proxy: 'http://localhost:8080'<br>})<br><br>const client = new Client({<br> node: 'http://localhost:9200',<br> proxy: 'http://user:pwd@localhost:8080'<br>})<br>```<br> |
|
||||
| `agent` | `http.AgentOptions, function` - http agent [options](https://nodejs.org/api/http.md#http_new_agent_options), or a function that returns an actual http agent instance. If you want to disable the http agent use entirely (and disable the `keep-alive` feature), set the agent to `false`.<br> *Default:* `null`<br><br>```js<br>const client = new Client({<br> node: 'http://localhost:9200',<br> agent: { agent: 'options' }<br>})<br><br>const client = new Client({<br> node: 'http://localhost:9200',<br> // the function takes as parameter the option<br> // object passed to the Connection constructor<br> agent: (opts) => new CustomAgent()<br>})<br><br>const client = new Client({<br> node: 'http://localhost:9200',<br> // Disable agent and keep-alive<br> agent: false<br>})<br>```<br> |
|
||||
| `nodeFilter` | `function` - Filters which node not to use for a request.<br> *Default:*<br><br>```js<br>function defaultNodeFilter (node) {<br> // avoid master only nodes<br> if (node.roles.master === true &&<br> node.roles.data === false &&<br> node.roles.ingest === false) {<br> return false<br> }<br> return true<br>}<br>```<br> |
|
||||
| `nodeSelector` | `function` - custom selection strategy.<br> *Options:* `'round-robin'`, `'random'`, custom function<br> *Default:* `'round-robin'`<br> *Custom function example:*<br><br>```js<br>function nodeSelector (connections) {<br> const index = calculateIndex()<br> return connections[index]<br>}<br>```<br> |
|
||||
| `generateRequestId` | `function` - function to generate the request id for every request, it takes two parameters, the request parameters and options.<br> By default it generates an incremental integer for every request.<br> *Custom function example:*<br><br>```js<br>function generateRequestId (params, options) {<br> // your id generation logic<br> // must be syncronous<br> return 'id'<br>}<br>```<br> |
|
||||
| `name` | `string, symbol` - The name to identify the client instance in the events.<br>*Default:* `elasticsearch-js` |
|
||||
| `opaqueIdPrefix` | `string` - A string that will be used to prefix any `X-Opaque-Id` header.<br>See [`X-Opaque-Id` support](/reference/observability.md#_x_opaque_id_support) for more details.<br>*Default:* `null` |
|
||||
| `headers` | `object` - A set of custom headers to send in every request.<br>*Default:* `{}` |
|
||||
| `context` | `object` - A custom object that you can use for observability in your events. It will be merged with the API-level context option.<br>*Default:* `null` |
|
||||
| `enableMetaHeader` | `boolean` - If true, adds a header named `'x-elastic-client-meta'`, containing some minimal telemetry data, such as the client and platform version.<br>*Default:* `true` |
|
||||
| `cloud` | `object` - Custom configuration for connecting to [Elastic Cloud](https://cloud.elastic.co). See [Authentication](/reference/connecting.md) for more details.<br> *Default:* `null`<br> *Cloud configuration example:*<br><br>```js<br>const client = new Client({<br> cloud: {<br> id: '<cloud-id>'<br> },<br> auth: {<br> username: 'elastic',<br> password: 'changeme'<br> }<br>})<br>```<br> |
|
||||
| `disablePrototypePoisoningProtection` | `boolean`, `'proto'`, `'constructor'` - The client can protect you against prototype poisoning attacks. Read [this article](https://web.archive.org/web/20200319091159/https://hueniverse.com/square-brackets-are-the-enemy-ff5b9fd8a3e8?gi=184a27ee2a08) to learn more about this security concern. If needed, you can enable prototype poisoning protection entirely (`false`) or one of the two checks (`'proto'` or `'constructor'`). For performance reasons, it is disabled by default. Read the `secure-json-parse` [documentation](https://github.com/fastify/secure-json-parse) to learn more.<br>*Default:* `true` |
|
||||
| `caFingerprint` | `string` - If configured, verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied fingerprint. Only accepts SHA256 digest fingerprints.<br>*Default:* `null` |
|
||||
| `maxResponseSize` | `number` - When configured, it verifies that the uncompressed response size is lower than the configured number, if it’s higher it will abort the request. It cannot be higher than buffer.constants.MAX_STRING_LENGTH<br>*Default:* `null` |
|
||||
| `maxCompressedResponseSize` | `number` - When configured, it verifies that the compressed response size is lower than the configured number, if it’s higher it will abort the request. It cannot be higher than buffer.constants.MAX_LENGTH<br>*Default:* `null` |
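
The following sketch combines a few of the options above; the values are only illustrative and should be tuned for your own deployment:

```js
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  maxRetries: 5,
  requestTimeout: 60000,
  compression: 'gzip',
  headers: { 'x-custom': 'header' },
  name: 'my-search-client'
})
```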
|
||||
|
||||
@ -1,13 +1,18 @@
|
||||
[[bulk_examples]]
|
||||
=== Bulk
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/bulk_examples.html
|
||||
---
|
||||
|
||||
With the {jsclient}/api-reference.html#_bulk[`bulk` API], you can perform multiple index/delete operations in a
|
||||
single API call. The `bulk` API significantly increases indexing speed.
|
||||
# Bulk [bulk_examples]
|
||||
|
||||
NOTE: You can also use the <<bulk-helper,bulk helper>>.
|
||||
With the [`bulk` API](/reference/api-reference.md#_bulk), you can perform multiple index/delete operations in a single API call. The `bulk` API significantly increases indexing speed.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
::::{note}
|
||||
You can also use the [bulk helper](/reference/client-helpers.md#bulk-helper).
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
require('array.prototype.flatmap').shim()
|
||||
@ -90,4 +95,5 @@ async function run () {
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
```
|
||||
|
||||
34 docs/reference/child.md (new file)
@ -0,0 +1,34 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/child.html
|
||||
---
|
||||
|
||||
# Creating a child client [child]
|
||||
|
||||
There are some use cases where you may need multiple instances of the client. You can easily do that by calling `new Client()` as many times as you need, but you will lose all the benefits of using a single client, such as the long-lived connections and the connection pool handling. To avoid this problem, the client offers a `child` API, which returns a new client instance that shares the connection pool with the parent client.
|
||||
|
||||
::::{note}
|
||||
The event emitter is shared between the parent and the child(ren). If you extend the parent client, the child client will have the same extensions, while if the child client adds an extension, the parent client will not be extended.
|
||||
::::
|
||||
|
||||
|
||||
You can pass to the `child` every client option you would pass to a normal client, except the connection pool-specific options (`ssl`, `agent`, `pingTimeout`, `Connection`, and `resurrectStrategy`).
|
||||
|
||||
::::{warning}
|
||||
If you call `close` in any of the parent/child clients, every client will be closed.
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const child = client.child({
|
||||
headers: { 'x-foo': 'bar' },
|
||||
})
|
||||
|
||||
client.info().then(console.log, console.log)
|
||||
child.info().then(console.log, console.log)
|
||||
```
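
Any option that is not connection pool specific can also be overridden per child; as a sketch (the values are only illustrative):

```js
// a child client with a longer timeout and its own name for observability events
const slowChild = client.child({
  name: 'slow-tasks',
  requestTimeout: 60000
})
```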
|
||||
532 docs/reference/client-helpers.md (new file)
@ -0,0 +1,532 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-helpers.html
|
||||
---
|
||||
|
||||
# Client helpers [client-helpers]
|
||||
|
||||
The client comes with a handy collection of helpers to give you a more comfortable experience with some APIs.
|
||||
|
||||
::::{warning}
|
||||
The client helpers are experimental, and the API may change in the next minor releases. The helpers will not work in any Node.js version lower than 10.
|
||||
::::
|
||||
|
||||
|
||||
|
||||
## Bulk helper [bulk-helper]
|
||||
|
||||
Added in `v7.7.0`
|
||||
|
||||
Running bulk requests can be complex due to the shape of the API; this helper aims to provide a nicer developer experience around the Bulk API.
|
||||
|
||||
|
||||
### Usage [_usage_3]
|
||||
|
||||
```js
|
||||
const { createReadStream } = require('fs')
|
||||
const split = require('split2')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: createReadStream('./dataset.ndjson').pipe(split()),
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log(result)
|
||||
// {
|
||||
// total: number,
|
||||
// failed: number,
|
||||
// retry: number,
|
||||
// successful: number,
|
||||
// time: number,
|
||||
// bytes: number,
|
||||
// aborted: boolean
|
||||
// }
|
||||
```
|
||||
|
||||
To create a new instance of the Bulk helper, access it as shown in the example above. The configuration options are listed below, followed by an example that combines several of them.
|
||||
|
||||
| | |
|
||||
| --- | --- |
|
||||
| `datasource` | An array, async generator or a readable stream with the data you need to index/create/update/delete. It can be an array of strings or objects, but also a stream of json strings or JavaScript objects.<br> If it is a stream, we recommend to use the [`split2`](https://www.npmjs.com/package/split2) package, that splits the stream on new lines delimiters.<br> This parameter is mandatory.<br><br>```js<br>const { createReadStream } = require('fs')<br>const split = require('split2')<br>const b = client.helpers.bulk({<br> // if you just use split(), the data will be used as array of strings<br> datasource: createReadStream('./dataset.ndjson').pipe(split())<br> // if you need to manipulate the data, you can pass JSON.parse to split<br> datasource: createReadStream('./dataset.ndjson').pipe(split(JSON.parse))<br>})<br>```<br> |
|
||||
| `onDocument` | A function that is called for each document of the datasource. Inside this function you can manipulate the document and you must return the operation you want to execute with the document. Look at the [Bulk API documentation](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-bulk) to see the supported operations.<br> This parameter is mandatory.<br><br>```js<br>const b = client.helpers.bulk({<br> onDocument (doc) {<br> return {<br> index: { _index: 'my-index' }<br> }<br> }<br>})<br>```<br> |
|
||||
| `onDrop` | A function that is called every time a document can’t be indexed and has reached the maximum number of retries.<br><br>```js<br>const b = client.helpers.bulk({<br>  onDrop (doc) {<br>    console.log(doc)<br>  }<br>})<br>```<br> |
|
||||
| `onSuccess` | A function that is called for each successful operation in the bulk request, which includes the result from Elasticsearch along with the original document that was sent, or `null` for delete operations.<br><br>```js<br>const b = client.helpers.bulk({<br> onSuccess ({ result, document }) {<br> console.log(`SUCCESS: Document ${result.index._id} indexed to ${result.index._index}`)<br> }<br>})<br>```<br> |
|
||||
| `flushBytes` | The size of the bulk body, in bytes, to reach before sending it. Defaults to 5MB.<br> *Default:* `5000000`<br><br>```js<br>const b = client.helpers.bulk({<br>  flushBytes: 1000000<br>})<br>```<br> |
|
||||
| `flushInterval` | How long (in milliseconds) the helper waits after reading the last document before flushing the body.<br> *Default:* `30000`<br><br>```js<br>const b = client.helpers.bulk({<br>  flushInterval: 30000<br>})<br>```<br> |
|
||||
| `concurrency` | How many requests are executed at the same time.<br> *Default:* `5`<br><br>```js<br>const b = client.helpers.bulk({<br>  concurrency: 10<br>})<br>```<br> |
|
||||
| `retries` | How many times a document is retried before the `onDrop` callback is called.<br> *Default:* Client max retries.<br><br>```js<br>const b = client.helpers.bulk({<br>  retries: 3<br>})<br>```<br> |
|
||||
| `wait` | How long to wait between retries, in milliseconds.<br> *Default:* `5000`<br><br>```js<br>const b = client.helpers.bulk({<br>  wait: 3000<br>})<br>```<br> |
|
||||
| `refreshOnCompletion` | If `true`, at the end of the bulk operation it runs a refresh on all indices or on the specified indices.<br> *Default:* false.<br><br>```js<br>const b = client.helpers.bulk({<br> refreshOnCompletion: true<br> // or<br> refreshOnCompletion: 'index-name'<br>})<br>```<br> |
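
As referenced above, here is a sketch that combines several of the options; the values are only illustrative and should be tuned for your own workload:

```js
const b = client.helpers.bulk({
  datasource: myDatasource,
  onDocument (doc) {
    return { index: { _index: 'my-index' } }
  },
  // send the body once it reaches ~1MB, or 10 seconds after the last document read
  flushBytes: 1000000,
  flushInterval: 10000,
  // run up to 10 bulk requests in parallel, retrying each document 3 times
  concurrency: 10,
  retries: 3,
  // log documents that still fail after all retries
  onDrop (doc) {
    console.log('dropped', doc)
  },
  // refresh the target index once the whole operation is done
  refreshOnCompletion: 'my-index'
})
```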
|
||||
|
||||
|
||||
### Supported operations [_supported_operations]
|
||||
|
||||
|
||||
#### Index [_index_2]
|
||||
|
||||
```js
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Create [_create_4]
|
||||
|
||||
```js
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
create: { _index: 'my-index', _id: doc.id }
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Update [_update_3]
|
||||
|
||||
```js
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
// Note that the update operation requires you to return
|
||||
// an array, where the first element is the action, while
|
||||
// the second are the document option
|
||||
return [
|
||||
{ update: { _index: 'my-index', _id: doc.id } },
|
||||
{ doc_as_upsert: true }
|
||||
]
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Delete [_delete_10]
|
||||
|
||||
```js
|
||||
client.helpers.bulk({
|
||||
datasource: myDatasource,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
delete: { _index: 'my-index', _id: doc.id }
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
### Abort a bulk operation [_abort_a_bulk_operation]
|
||||
|
||||
If needed, you can abort a bulk operation at any time. The bulk helper returns a [thenable](https://promisesaplus.com/), which has an `abort` method.
|
||||
|
||||
::::{note}
|
||||
The abort method stops the execution of the bulk operation, but if you are using a concurrency higher than one, the operations that are already running will not be stopped.
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
const { createReadStream } = require('fs')
|
||||
const split = require('split2')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const b = client.helpers.bulk({
|
||||
datasource: createReadStream('./dataset.ndjson').pipe(split()),
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
b.abort()
|
||||
}
|
||||
})
|
||||
|
||||
console.log(await b)
|
||||
```
|
||||
|
||||
|
||||
### Passing custom options to the Bulk API [_passing_custom_options_to_the_bulk_api]
|
||||
|
||||
You can pass any option supported by the [Bulk API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-bulk) to the helper, and the helper uses those options in conjunction with the Bulk API call.
|
||||
|
||||
```js
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: [...],
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
},
|
||||
pipeline: 'my-pipeline'
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
### Usage with an async generator [_usage_with_an_async_generator]
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
async function * generator () {
|
||||
const dataset = [
|
||||
{ user: 'jon', age: 23 },
|
||||
{ user: 'arya', age: 18 },
|
||||
{ user: 'tyrion', age: 39 }
|
||||
]
|
||||
for (const doc of dataset) {
|
||||
yield doc
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: generator(),
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'my-index' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
console.log(result)
|
||||
```
|
||||
|
||||
|
||||
### Modifying a document before operation [_modifying_a_document_before_operation]
|
||||
|
||||
Added in `v8.8.2`
|
||||
|
||||
If you need to modify documents in your datasource before it is sent to Elasticsearch, you can return an array in the `onDocument` function rather than an operation object. The first item in the array must be the operation object, and the second item must be the document or partial document object as you’d like it to be sent to Elasticsearch.
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: [...],
|
||||
onDocument (doc) {
|
||||
return [
|
||||
{ index: { _index: 'my-index' } },
|
||||
{ ...doc, favorite_color: 'mauve' },
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
console.log(result)
|
||||
```
|
||||
|
||||
|
||||
## Multi search helper [multi-search-helper]
|
||||
|
||||
Added in `v7.8.0`
|
||||
|
||||
If you send search requests at a high rate, this helper might be useful for you. It uses the multi search API under the hood to batch the requests and improve the overall performance of your application. The `result` also exposes a `documents` property, which allows you to access the hit sources directly.
|
||||
|
||||
|
||||
### Usage [_usage_4]
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'javascript' } } }
|
||||
)
|
||||
.then(result => console.log(result.body)) // or result.documents
|
||||
.catch(err => console.error(err))
|
||||
```
|
||||
|
||||
To create a new instance of the multi search (msearch) helper, you should access it as shown in the example above. The configuration options are listed below, followed by an example that combines several of them.
|
||||
|
||||
| | |
|
||||
| --- | --- |
|
||||
| `operations` | How many search operations should be sent in a single msearch request.<br> *Default:* `5`<br><br>```js<br>const m = client.helpers.msearch({<br> operations: 10<br>})<br>```<br> |
|
||||
| `flushInterval` | How long (in milliseconds) the helper waits after reading the last operation before flushing the pending operations.<br> *Default:* `500`<br><br>```js<br>const m = client.helpers.msearch({<br>  flushInterval: 500<br>})<br>```<br> |
|
||||
| `concurrency` | How many requests are executed at the same time.<br> *Default:* `5`<br><br>```js<br>const m = client.helpers.msearch({<br>  concurrency: 10<br>})<br>```<br> |
|
||||
| `retries` | How many times an operation is retried before the request is resolved. An operation is retried only in case of a 429 error.<br> *Default:* Client max retries.<br><br>```js<br>const m = client.helpers.msearch({<br>  retries: 3<br>})<br>```<br> |
|
||||
| `wait` | How much time to wait before retrying, in milliseconds.<br> *Default:* `5000`<br><br>```js<br>const m = client.helpers.msearch({<br> wait: 3000<br>})<br>```<br> |
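Putting these together, a configuration that tunes several of the options above at once might look like the following sketch (all values are illustrative):

```js
const m = client.helpers.msearch({
  operations: 10,     // batch up to 10 searches per msearch request
  flushInterval: 500, // flush pending searches 500 ms after the last one read
  concurrency: 10,    // run up to 10 msearch requests at the same time
  retries: 3,         // retry 429 responses up to 3 times
  wait: 3000          // wait 3 seconds before retrying
})
```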
|
||||
|
||||
|
||||
### Stopping the msearch helper [_stopping_the_msearch_helper]
|
||||
|
||||
If needed, you can stop an msearch processor at any time. The msearch helper returns a [thenable](https://promisesaplus.com/), which has a `stop` method.
|
||||
|
||||
If you are creating multiple msearch helper instances and using them for a limited period of time, remember to always call the `stop` method once you have finished using them; otherwise your application will start leaking memory.
|
||||
|
||||
The `stop` method accepts an optional error that will be dispatched to every subsequent search request.
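For example, a minimal sketch (the error message is illustrative):

```js
// Stop the helper; every search request made after this call
// rejects with the provided error
m.stop(new Error('msearch helper has been stopped'))
```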
|
||||
|
||||
::::{note}
|
||||
The stop method stops the execution of the msearch processor, but if you are using a concurrency higher than one, the operations that are already running will not be stopped.
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'javascript' } } }
|
||||
)
|
||||
.then(result => console.log(result.body))
|
||||
.catch(err => console.error(err))
|
||||
|
||||
m.search(
|
||||
{ index: 'stackoverflow' },
|
||||
{ query: { match: { title: 'ruby' } } }
|
||||
)
|
||||
.then(result => console.log(result.body))
|
||||
.catch(err => console.error(err))
|
||||
|
||||
setImmediate(() => m.stop())
|
||||
```
|
||||
|
||||
|
||||
## Search helper [search-helper]
|
||||
|
||||
Added in `v7.7.0`
|
||||
|
||||
A simple wrapper around the search API. Instead of returning the entire `result` object, it returns only the source of the matching documents. To improve performance, this helper automatically adds `filter_path=hits.hits._source` to the query string.
|
||||
|
||||
```js
|
||||
const documents = await client.helpers.search({
|
||||
index: 'stackoverflow',
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for (const doc of documents) {
|
||||
console.log(doc)
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Scroll search helper [scroll-search-helper]
|
||||
|
||||
Added in `v7.7.0`
|
||||
|
||||
This helper offers a simple and intuitive way to use the scroll search API. Once called, it returns an [async iterator](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) which can be used in conjunction with a `for await...of` loop. It automatically handles the `429` error and uses the `maxRetries` option of the client.
|
||||
|
||||
```js
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: 'stackoverflow',
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for await (const result of scrollSearch) {
|
||||
console.log(result)
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Clear a scroll search [_clear_a_scroll_search]
|
||||
|
||||
If needed, you can clear a scroll search by calling `result.clear()`:
|
||||
|
||||
```js
|
||||
for await (const result of scrollSearch) {
|
||||
if (condition) {
|
||||
await result.clear()
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Quickly getting the documents [_quickly_getting_the_documents]
|
||||
|
||||
If you only need the documents from the result of a scroll search, you can access them via `result.documents`:
|
||||
|
||||
```js
|
||||
for await (const result of scrollSearch) {
|
||||
console.log(result.documents)
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Scroll documents helper [scroll-documents-helper]
|
||||
|
||||
Added in `v7.7.0`
|
||||
|
||||
It works in the same way as the scroll search helper, but it returns only the documents instead. Note that every loop cycle returns a single document, and you can’t use the `clear` method. To improve performance, this helper automatically adds `filter_path=hits.hits._source` to the query string.
|
||||
|
||||
```js
|
||||
const scrollSearch = client.helpers.scrollDocuments({
|
||||
index: 'stackoverflow',
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for await (const doc of scrollSearch) {
|
||||
console.log(doc)
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## ES|QL helper [esql-helper]
|
||||
|
||||
ES|QL queries can return their results in [several formats](docs-content://explore-analyze/query-filter/languages/esql-rest.md#esql-rest-format). The default JSON format contains arrays of values for each row, with column names and types returned separately. The helpers described below make these results easier to consume.
|
||||
|
||||
|
||||
### Usage [_usage_5]
|
||||
|
||||
|
||||
#### `toRecords` [_torecords]
|
||||
|
||||
Added in `v8.14.0`
|
||||
|
||||
The default JSON format returned by ES|QL queries contains arrays of values for each row, with column names and types returned separately:
|
||||
|
||||
```json
|
||||
{
|
||||
"columns": [
|
||||
{ "name": "@timestamp", "type": "date" },
|
||||
{ "name": "client_ip", "type": "ip" },
|
||||
{ "name": "event_duration", "type": "long" },
|
||||
{ "name": "message", "type": "keyword" }
|
||||
],
|
||||
"values": [
|
||||
[
|
||||
"2023-10-23T12:15:03.360Z",
|
||||
"172.21.2.162",
|
||||
3450233,
|
||||
"Connected to 10.1.0.3"
|
||||
],
|
||||
[
|
||||
"2023-10-23T12:27:28.948Z",
|
||||
"172.21.2.113",
|
||||
2764889,
|
||||
"Connected to 10.1.0.2"
|
||||
]
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
In many cases, it’s preferable to operate on an array of objects, one object per row, rather than an array of arrays. The ES|QL `toRecords` helper converts row data into objects.
|
||||
|
||||
```js
|
||||
await client.helpers
|
||||
.esql({ query: 'FROM sample_data | LIMIT 2' })
|
||||
.toRecords()
|
||||
// =>
|
||||
// {
|
||||
// "columns": [
|
||||
// { "name": "@timestamp", "type": "date" },
|
||||
// { "name": "client_ip", "type": "ip" },
|
||||
// { "name": "event_duration", "type": "long" },
|
||||
// { "name": "message", "type": "keyword" }
|
||||
// ],
|
||||
// "records": [
|
||||
// {
|
||||
// "@timestamp": "2023-10-23T12:15:03.360Z",
|
||||
// "client_ip": "172.21.2.162",
|
||||
// "event_duration": 3450233,
|
||||
// "message": "Connected to 10.1.0.3"
|
||||
// },
|
||||
// {
|
||||
// "@timestamp": "2023-10-23T12:27:28.948Z",
|
||||
// "client_ip": "172.21.2.113",
|
||||
// "event_duration": 2764889,
|
||||
// "message": "Connected to 10.1.0.2"
|
||||
// },
|
||||
// ]
|
||||
// }
|
||||
```
|
||||
|
||||
In TypeScript, you can declare the type that `toRecords` returns:
|
||||
|
||||
```ts
|
||||
type EventLog = {
|
||||
'@timestamp': string,
|
||||
client_ip: string,
|
||||
event_duration: number,
|
||||
message: string,
|
||||
}
|
||||
|
||||
const result = await client.helpers
|
||||
.esql({ query: 'FROM sample_data | LIMIT 2' })
|
||||
.toRecords<EventLog>()
|
||||
```
|
||||
|
||||
|
||||
#### `toArrowReader` [_toarrowreader]
|
||||
|
||||
Added in `v8.16.0`
|
||||
|
||||
ES|QL can return results in multiple binary formats, including [Apache Arrow](https://arrow.apache.org/)'s streaming format. Because it is a very efficient format to read, it can be valuable for performing high-performance in-memory analytics. And, because the response is streamed as batches of records, it can be used to produce aggregations and other calculations on larger-than-memory data sets.
|
||||
|
||||
`toArrowReader` returns a [`RecordBatchStreamReader`](https://arrow.apache.org/docs/js/classes/Arrow_dom.RecordBatchReader.md).
|
||||
|
||||
```ts
|
||||
const reader = await client.helpers
|
||||
.esql({ query: 'FROM sample_data' })
|
||||
.toArrowReader()
|
||||
|
||||
// print each record as JSON
|
||||
for (const recordBatch of reader) {
|
||||
for (const record of recordBatch) {
|
||||
console.log(record.toJSON())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
#### `toArrowTable` [_toarrowtable]
|
||||
|
||||
Added in `v8.16.0`
|
||||
|
||||
If you would like to pull the entire data set in Arrow format but without streaming, you can use the `toArrowTable` helper to get a [Table](https://arrow.apache.org/docs/js/classes/Arrow_dom.Table.md) back instead.
|
||||
|
||||
```ts
|
||||
const table = await client.helpers
|
||||
.esql({ query: 'FROM sample_data' })
|
||||
.toArrowTable()
|
||||
|
||||
console.log(table.toArray())
|
||||
```
|
||||
121 docs/reference/client-testing.md Normal file
@ -0,0 +1,121 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-testing.html
|
||||
---
|
||||
|
||||
# Testing [client-testing]
|
||||
|
||||
Testing is one of the most important parts of developing an application. The client is very flexible when it comes to testing and is compatible with most testing frameworks (such as [`ava`](https://www.npmjs.com/package/ava), which is used in the examples below).
|
||||
|
||||
If you are using this client, you are most likely working with {{es}}, and one of the first issues you face is how to test your application. A perfectly valid solution is to test against a real {{es}} instance, but that is an integration test, while what you want is a unit test. There are many ways to solve this problem: you could run the database with Docker or use an in-memory compatible alternative, but if you are writing unit tests that can be easily parallelized, this becomes quite cumbersome. A different way to improve your unit-testing experience is to use a mock.
|
||||
|
||||
The client is designed to be easy to extend and adapt to your needs. Thanks to its internal architecture it allows you to change some specific components while keeping the rest of it working as usual. Each {{es}} official client is composed of the following components:
|
||||
|
||||
* `API layer`: every {{es}} API that you can call.
|
||||
* `Transport`: a component that takes care of preparing a request before sending it and handling all the retry and sniffing strategies.
|
||||
* `ConnectionPool`: {{es}} is a cluster and might have multiple nodes; the `ConnectionPool` takes care of them.
|
||||
* `Serializer`: A class with all the serialization strategies, from basic JSON to newline-delimited JSON.
|
||||
* `Connection`: The actual HTTP library.
|
||||
|
||||
The best way to mock {{es}} with the official clients is to replace the `Connection` component since it has very few responsibilities and it does not interact with other internal components other than getting requests and returning responses.
|
||||
|
||||
|
||||
## `@elastic/elasticsearch-mock` [_elasticelasticsearch_mock]
|
||||
|
||||
Writing a mock for every test by hand can be annoying and error-prone, so we have built a simple yet powerful mocking library specifically designed for this client. You can install it with the following command:
|
||||
|
||||
```sh
|
||||
npm install @elastic/elasticsearch-mock --save-dev
|
||||
```
|
||||
|
||||
With this library you can create custom mocks for any request you can send to {{es}}. It offers a simple and intuitive API and it mocks only the HTTP layer, leaving the rest of the client working as usual.
|
||||
|
||||
Before showing all of its features, and what you can do with it, let’s see an example:
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const Mock = require('@elastic/elasticsearch-mock')
|
||||
|
||||
const mock = new Mock()
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
Connection: mock.getConnection()
|
||||
})
|
||||
|
||||
mock.add({
|
||||
method: 'GET',
|
||||
path: '/'
|
||||
}, () => {
|
||||
return { status: 'ok' }
|
||||
})
|
||||
|
||||
client.info().then(console.log, console.log)
|
||||
```
|
||||
|
||||
As you can see, the mock works closely with the client itself. Once you have created a new instance of the mock library, call the `mock.getConnection()` method and pass its result to the `Connection` option of the client. From now on, every request is handled by the mock library, and the HTTP layer is never touched. As a result, your tests are significantly faster and can easily be parallelized.
|
||||
|
||||
The library allows you to write both “strict” and “loose” mocks, which means that you can write a mock that handles a very specific request, or a looser one that handles a group of requests. Let’s see this in action:
|
||||
|
||||
```js
|
||||
mock.add({
|
||||
method: 'POST',
|
||||
path: '/indexName/_search'
|
||||
}, () => {
|
||||
return {
|
||||
hits: {
|
||||
total: { value: 1, relation: 'eq' },
|
||||
hits: [{ _source: { baz: 'faz' } }]
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
mock.add({
|
||||
method: 'POST',
|
||||
path: '/indexName/_search',
|
||||
body: { query: { match: { foo: 'bar' } } }
|
||||
}, () => {
|
||||
return {
|
||||
hits: {
|
||||
total: { value: 0, relation: 'eq' },
|
||||
hits: []
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
In the example above, every search request gets the first response, while every search request that uses the query described in the second mock gets the second response.
|
||||
|
||||
You can also specify dynamic paths:
|
||||
|
||||
```js
|
||||
mock.add({
|
||||
method: 'GET',
|
||||
path: '/:index/_count'
|
||||
}, () => {
|
||||
return { count: 42 }
|
||||
})
|
||||
|
||||
client.count({ index: 'foo' }).then(console.log, console.log) // => { count: 42 }
|
||||
client.count({ index: 'bar' }).then(console.log, console.log) // => { count: 42 }
|
||||
```
|
||||
|
||||
And wildcards are supported as well.
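The following is a minimal sketch of a catch-all mock that uses a wildcard path (the path and the response body are illustrative; check the mock library's documentation for the exact wildcard syntax supported by your version):

```js
mock.add({
  method: 'POST',
  path: '*'
}, () => {
  // catch-all response for any POST request that no stricter mock matches
  return { acknowledged: true }
})
```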
|
||||
|
||||
Another very interesting use case is the ability to create a test that randomly fails to see how your code reacts to failures:
|
||||
|
||||
```js
|
||||
mock.add({
|
||||
method: 'GET',
|
||||
path: '/:index/_count'
|
||||
}, () => {
|
||||
if (Math.random() > 0.8) {
|
||||
// assumes `errors` is imported from the client: const { errors } = require('@elastic/elasticsearch')
return new errors.ResponseError({ body: {}, statusCode: 500 })
|
||||
} else {
|
||||
return { count: 42 }
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
We have seen how simple it is to mock {{es}} and test your application; you can find many more features and examples in the [module documentation](https://github.com/elastic/elasticsearch-js-mock).
|
||||
|
||||
19 docs/reference/configuration.md Normal file
@ -0,0 +1,19 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-configuration.html
|
||||
---
|
||||
|
||||
# Configuration [client-configuration]
|
||||
|
||||
The client is designed to be easily configured for your needs. In the following section, you can see the possible options that you can use to configure it.
|
||||
|
||||
* [Basic configuration](/reference/basic-config.md)
|
||||
* [Advanced configuration](/reference/advanced-config.md)
|
||||
* [Timeout best practices](docs-content://troubleshoot/elasticsearch/elasticsearch-client-javascript-api/nodejs.md)
|
||||
* [Creating a child client](/reference/child.md)
|
||||
* [Testing](/reference/client-testing.md)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
524 docs/reference/connecting.md Normal file
@ -0,0 +1,524 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/client-connecting.html
|
||||
---
|
||||
|
||||
# Connecting [client-connecting]
|
||||
|
||||
This page contains the information you need to connect and use the Client with {{es}}.
|
||||
|
||||
## Authentication [authentication]
|
||||
|
||||
This document contains code snippets to show you how to connect to various {{es}} providers.
|
||||
|
||||
|
||||
### Elastic Cloud [auth-ec]
|
||||
|
||||
If you are using [Elastic Cloud](https://www.elastic.co/cloud), the client offers an easy way to connect to it via the `cloud` option. You must pass the Cloud ID that you can find in the cloud console, then your username and password inside the `auth` option.
|
||||
|
||||
::::{note}
|
||||
When connecting to Elastic Cloud, the client will automatically enable both request and response compression by default, since it yields significant throughput improvements. Moreover, the client will also set the TLS option `secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still override these options in your configuration.
|
||||
::::
|
||||
|
||||
|
||||
::::{important}
|
||||
Do not enable sniffing when using Elastic Cloud. Since the nodes are behind a load balancer, Elastic Cloud takes care of everything for you. Take a look [here](https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how) to learn more.
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: {
|
||||
id: '<cloud-id>'
|
||||
},
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
## Connecting to a self-managed cluster [connect-self-managed-new]
|
||||
|
||||
By default {{es}} will start with security features like authentication and TLS enabled. To connect to the {{es}} cluster you’ll need to configure the Node.js {{es}} client to use HTTPS with the generated CA certificate in order to make requests successfully.
|
||||
|
||||
If you’re just getting started with {{es}} we recommend reading the documentation on [configuring](docs-content://deploy-manage/deploy/self-managed/configure-elasticsearch.md) and [starting {{es}}](docs-content://deploy-manage/maintenance/start-stop-services/start-stop-elasticsearch.md) to ensure your cluster is running as expected.
|
||||
|
||||
When you start {{es}} for the first time you’ll see a distinct block like the one below in the output from {{es}} (you may have to scroll up if it’s been a while):
|
||||
|
||||
```sh
|
||||
-> Elasticsearch security features have been automatically configured!
|
||||
-> Authentication is enabled and cluster connections are encrypted.
|
||||
|
||||
-> Password for the elastic user (reset with `bin/elasticsearch-reset-password -u elastic`):
|
||||
lhQpLELkjkrawaBoaz0Q
|
||||
|
||||
-> HTTP CA certificate SHA-256 fingerprint:
|
||||
a52dd93511e8c6045e21f16654b77c9ee0f34aea26d9f40320b531c474676228
|
||||
...
|
||||
```
|
||||
|
||||
Depending on the circumstances there are two options for verifying the HTTPS connection, either verifying with the CA certificate itself or via the HTTP CA certificate fingerprint.
|
||||
|
||||
|
||||
### TLS configuration [auth-tls]
|
||||
|
||||
The generated root CA certificate can be found in the `certs` directory in your {{es}} config location (`$ES_CONF_PATH/certs/http_ca.crt`). If you’re running {{es}} in Docker there is [additional documentation for retrieving the CA certificate](docs-content://deploy-manage/deploy/self-managed/install-elasticsearch-with-docker.md).
|
||||
|
||||
Without any additional configuration you can specify `https://` node URLs, and the certificates used to sign these requests will be verified. To turn off certificate verification, you must specify a `tls` object in the top-level config and set `rejectUnauthorized: false`. The default `tls` values are the same as those used by Node.js’s [`tls.connect()`](https://nodejs.org/api/tls.md#tls_tls_connect_options_callback).
|
||||
|
||||
```js
|
||||
const fs = require('node:fs')
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
},
|
||||
tls: {
|
||||
ca: fs.readFileSync('./http_ca.crt'),
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
### CA fingerprint [auth-ca-fingerprint]
|
||||
|
||||
You can configure the client to only trust certificates that are signed by a specific CA certificate (CA certificate pinning) by providing a `caFingerprint` option. This will verify that the fingerprint of the CA certificate that has signed the certificate of the server matches the supplied value. You must configure a SHA256 digest.
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://example.com',
|
||||
auth: { ... },
|
||||
// the fingerprint (SHA256) of the CA certificate that is used to sign
|
||||
// the certificate that the Elasticsearch node presents for TLS.
|
||||
caFingerprint: '20:0D:CA:FA:76:...',
|
||||
tls: {
|
||||
// might be required if it's a self-signed certificate
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
The certificate fingerprint can be calculated using `openssl x509` with the certificate file:
|
||||
|
||||
```sh
|
||||
openssl x509 -fingerprint -sha256 -noout -in /path/to/http_ca.crt
|
||||
```
|
||||
|
||||
If you don’t have access to the generated CA file from {{es}} you can use the following script to output the root CA fingerprint of the {{es}} instance with `openssl s_client`:
|
||||
|
||||
```sh
|
||||
# Replace the values of 'localhost' and '9200' to the
|
||||
# corresponding host and port values for the cluster.
|
||||
openssl s_client -connect localhost:9200 -servername localhost -showcerts </dev/null 2>/dev/null \
|
||||
| openssl x509 -fingerprint -sha256 -noout -in /dev/stdin
|
||||
```
|
||||
|
||||
The output of `openssl x509` will look something like this:
|
||||
|
||||
```sh
|
||||
SHA256 Fingerprint=A5:2D:D9:35:11:E8:C6:04:5E:21:F1:66:54:B7:7C:9E:E0:F3:4A:EA:26:D9:F4:03:20:B5:31:C4:74:67:62:28
|
||||
```
|
||||
|
||||
|
||||
## Connecting without security enabled [connect-no-security]
|
||||
|
||||
::::{warning}
|
||||
Running {{es}} without security enabled is not recommended.
|
||||
::::
|
||||
|
||||
|
||||
If your cluster is configured with [security explicitly disabled](elasticsearch://docs/reference/elasticsearch/configuration-reference/security-settings.md) then you can connect via HTTP:
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://example.com'
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
## Authentication strategies [auth-strategies]
|
||||
|
||||
Below you can find all the supported authentication strategies.
|
||||
|
||||
|
||||
### ApiKey authentication [auth-apikey]
|
||||
|
||||
You can use the [ApiKey](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-api-key) authentication by passing the `apiKey` parameter via the `auth` option. The `apiKey` parameter can be either a base64 encoded string or an object with the values that you can obtain from the [create api key endpoint](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-api-key).
|
||||
|
||||
::::{note}
|
||||
If you provide both basic authentication credentials and the ApiKey configuration, the ApiKey takes precedence.
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
apiKey: 'base64EncodedKey'
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
apiKey: {
|
||||
id: 'foo',
|
||||
api_key: 'bar'
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
### Bearer authentication [auth-bearer]
|
||||
|
||||
You can provide your credentials by passing the `bearer` token parameter via the `auth` option. Useful for [service account tokens](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-security-create-service-token). Be aware that it does not handle automatic token refresh.
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
bearer: 'token'
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
### Basic authentication [auth-basic]
|
||||
|
||||
You can provide your credentials by passing the `username` and `password` parameters via the `auth` option.
|
||||
|
||||
::::{note}
|
||||
If you provide both basic authentication credentials and the Api Key configuration, the Api Key will take precedence.
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://localhost:9200',
|
||||
auth: {
|
||||
username: 'elastic',
|
||||
password: 'changeme'
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
Otherwise, you can provide your credentials in the node(s) URL.
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://username:password@localhost:9200'
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
## Usage [client-usage]
|
||||
|
||||
Using the client is straightforward: it supports all the public APIs of {{es}}, and every method exposes the same signature.
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
The returned value of every API call is the response body from {{es}}. If you need to access additional metadata, such as the status code or headers, you must specify `meta: true` in the request options:
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}, { meta: true })
|
||||
```
|
||||
|
||||
In this case, the result will be:
|
||||
|
||||
```ts
|
||||
{
|
||||
body: object | boolean
|
||||
statusCode: number
|
||||
headers: object
|
||||
warnings: string[],
|
||||
meta: object
|
||||
}
|
||||
```
|
||||
|
||||
::::{note}
|
||||
The body is a boolean value when you use `HEAD` APIs.
|
||||
::::
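For example, a `HEAD`-based API such as `indices.exists` resolves to a boolean (assuming an existing `client` instance):

```js
// HEAD API: the body is a boolean, not an object
const exists = await client.indices.exists({ index: 'my-index' })
console.log(exists) // true or false
```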
|
||||
|
||||
|
||||
|
||||
### Aborting a request [_aborting_a_request]
|
||||
|
||||
If needed, you can abort a running request by using the `AbortController` standard.
|
||||
|
||||
::::{warning}
|
||||
If you abort a request, the request will fail with a `RequestAbortedError`.
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
const AbortController = require('node-abort-controller')
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' }
|
||||
})
|
||||
|
||||
const abortController = new AbortController()
|
||||
setImmediate(() => abortController.abort())
|
||||
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}, { signal: abortController.signal })
|
||||
```
|
||||
|
||||
|
||||
### Request specific options [_request_specific_options]
|
||||
|
||||
If needed, you can pass request-specific options in a second object:
|
||||
|
||||
```js
|
||||
const result = await client.search({
|
||||
index: 'my-index',
|
||||
body: {
|
||||
query: {
|
||||
match: { hello: 'world' }
|
||||
}
|
||||
}
|
||||
}, {
|
||||
ignore: [404],
|
||||
maxRetries: 3
|
||||
})
|
||||
```
|
||||
|
||||
The supported request specific options are:
|
||||
|
||||
| Option | Description |
|
||||
| --- | ----------- |
|
||||
| `ignore` | `number[]` - HTTP status codes which should not be considered errors for this request.<br>*Default:* `null` |
|
||||
| `requestTimeout` | `number` or `string` - Max request timeout for the request in milliseconds. This overrides the client default, which is to not time out at all. See [Elasticsearch best practices for HTTP clients](elasticsearch://docs/reference/elasticsearch/configuration-reference/networking-settings.md#_http_client_configuration) for more info.<br>*Default:* No timeout |
|
||||
| `retryOnTimeout` | `boolean` - Retry requests that have timed out.<br>*Default:* `false` |
|
||||
| `maxRetries` | `number` - Max number of retries for the request, it overrides the client default.<br>*Default:* `3` |
|
||||
| `compression` | `string` or `boolean` - Enables body compression for the request.<br>*Options:* `false`, `'gzip'`<br>*Default:* `false` |
|
||||
| `asStream` | `boolean` - Instead of getting the parsed body back, you get the raw Node.js stream of data.<br>*Default:* `false` |
|
||||
| `headers` | `object` - Custom headers for the request.<br>*Default:* `null` |
|
||||
| `querystring` | `object` - Custom querystring for the request.<br>*Default:* `null` |
|
||||
| `id` | `any` - Custom request ID. *(overrides the top level request id generator)*<br>*Default:* `null` |
|
||||
| `context` | `any` - Custom object per request. *(you can use it to pass data to the clients events)*<br>*Default:* `null` |
|
||||
| `opaqueId` | `string` - Set the `X-Opaque-Id` HTTP header. See [X-Opaque-Id HTTP header](elasticsearch://docs/reference/elasticsearch/rest-apis/api-conventions.md#x-opaque-id).<br>*Default:* `null` |
|
||||
| `maxResponseSize` | `number` - When configured, it verifies that the uncompressed response size is lower than the configured number; if it’s higher, it aborts the request. It cannot be higher than `buffer.constants.MAX_STRING_LENGTH`.<br>*Default:* `null` |
|
||||
| `maxCompressedResponseSize` | `number` - When configured, it verifies that the compressed response size is lower than the configured number; if it’s higher, it aborts the request. It cannot be higher than `buffer.constants.MAX_LENGTH`.<br>*Default:* `null` |
|
||||
| `signal` | `AbortSignal` - The AbortSignal instance to allow request abortion.<br>*Default:* `null` |
|
||||
| `meta` | `boolean` - Rather than returning the body, return an object containing `body`, `statusCode`, `headers` and `meta` keys<br>*Default*: `false` |
|
||||
| `redaction` | `object` - Options for redacting potentially sensitive data from error metadata. See [Redaction of potentially sensitive data](/reference/advanced-config.md#redaction). |
|
||||
|
||||
## Using the Client in a Function-as-a-Service Environment [client-faas-env]
|
||||
|
||||
This section illustrates the best practices for leveraging the {{es}} client in a Function-as-a-Service (FaaS) environment. The most influential optimization is to initialize the client outside of the function, in the global scope. This practice not only improves performance but also enables background functionality such as [sniffing](https://www.elastic.co/blog/elasticsearch-sniffing-best-practices-what-when-why-how). The following examples provide a skeleton for these best practices.
|
||||
|
||||
|
||||
### GCP Cloud Functions [_gcp_cloud_functions]
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
exports.testFunction = async function (req, res) {
|
||||
// use the client
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### AWS Lambda [_aws_lambda]
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
exports.handler = async function (event, context) {
|
||||
// use the client
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Azure Functions [_azure_functions]
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
|
||||
const client = new Client({
|
||||
// client initialisation
|
||||
})
|
||||
|
||||
module.exports = async function (context, req) {
|
||||
// use the client
|
||||
}
|
||||
```
|
||||
|
||||
Resources used to assess these recommendations:
|
||||
|
||||
* [GCP Cloud Functions: Tips & Tricks](https://cloud.google.com/functions/docs/bestpractices/tips#use_global_variables_to_reuse_objects_in_future_invocations)
|
||||
* [Best practices for working with AWS Lambda functions](https://docs.aws.amazon.com/lambda/latest/dg/best-practices.md)
|
||||
* [Azure Functions Python developer guide](https://docs.microsoft.com/en-us/azure/azure-functions/functions-reference-python?tabs=azurecli-linux%2Capplication-level#global-variables)
|
||||
* [AWS Lambda: Comparing the effect of global scope](https://docs.aws.amazon.com/lambda/latest/operatorguide/global-scope.md)
|
||||
|
||||
|
||||
## Connecting through a proxy [client-connect-proxy]
|
||||
|
||||
Added in `v7.10.0`
|
||||
|
||||
If you need to go through an http(s) proxy to connect to {{es}}, the client offers a handy configuration option out of the box. Under the hood, it uses the [`hpagent`](https://github.com/delvedor/hpagent) module.
|
||||
|
||||
::::{important}
|
||||
In versions 8.0+ of the client, the default `Connection` type is set to `UndiciConnection`, which does not support proxy configurations. To use a proxy, you will need to use the `HttpConnection` class from `@elastic/transport` instead.
|
||||
::::
|
||||
|
||||
|
||||
```js
|
||||
import { Client } from '@elastic/elasticsearch'
import { HttpConnection } from '@elastic/transport'
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
proxy: 'http://localhost:8080',
|
||||
Connection: HttpConnection,
|
||||
})
|
||||
```
|
||||
|
||||
Basic authentication is supported as well:
|
||||
|
||||
```js
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
proxy: 'http://user:pwd@localhost:8080',
|
||||
Connection: HttpConnection,
|
||||
})
|
||||
```
|
||||
|
||||
If you are connecting through a non-http(s) proxy, such as a `socks5` or `pac`, you can use the `agent` option to configure it.
|
||||
|
||||
```js
|
||||
const SocksProxyAgent = require('socks-proxy-agent')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
agent () {
|
||||
return new SocksProxyAgent('socks://127.0.0.1:1080')
|
||||
},
|
||||
Connection: HttpConnection,
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
## Error handling [client-error-handling]
|
||||
|
||||
The client exposes a variety of error objects that you can use to enhance your error handling. You can find all the error objects inside the `errors` key in the client.
|
||||
|
||||
```js
|
||||
const { errors } = require('@elastic/elasticsearch')
|
||||
console.log(errors)
|
||||
```
|
||||
|
||||
You can find the errors exported by the client in the table below.
|
||||
|
||||
| Error | Description | Properties |
|
||||
| --- | --- | --- |
|
||||
|
||||
| `ElasticsearchClientError` | Every error inherits from this class, it is the basic error generated by the client. | * `name` - `string`<br>* `message` - `string`<br> |
|
||||
| `TimeoutError` | Generated when a request exceeds the `requestTimeout` option. | * `name` - `string`<br>* `message` - `string`<br>* `meta` - `object`, contains all the information about the request<br> |
|
||||
| `ConnectionError` | Generated when an error occurs during the request, it can be a connection error or a malformed stream of data. | * `name` - `string`<br>* `message` - `string`<br>* `meta` - `object`, contains all the information about the request<br> |
|
||||
| `RequestAbortedError` | Generated if the user calls the `request.abort()` method. | * `name` - `string`<br>* `message` - `string`<br>* `meta` - `object`, contains all the information about the request<br> |
|
||||
| `NoLivingConnectionsError` | Given the configuration, the ConnectionPool was not able to find a usable Connection for this request. | * `name` - `string`<br>* `message` - `string`<br>* `meta` - `object`, contains all the information about the request<br> |
|
||||
| `SerializationError` | Generated if the serialization fails. | * `name` - `string`<br>* `message` - `string`<br>* `data` - `object`, the object to serialize<br> |
|
||||
| `DeserializationError` | Generated if the deserialization fails. | * `name` - `string`<br>* `message` - `string`<br>* `data` - `string`, the string to deserialize<br> |
|
||||
| `ConfigurationError` | Generated if there is a malformed configuration or parameter. | * `name` - `string`<br>* `message` - `string`<br> |
|
||||
| `ResponseError` | Generated in case of a `4xx` or `5xx` response. | * `name` - `string`<br>* `message` - `string`<br>* `meta` - `object`, contains all the information about the request<br>* `body` - `object`, the response body<br>* `statusCode` - `number`, the response status code<br>* `headers` - `object`, the response headers<br> |
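As a sketch of how these errors can be used in practice, you might catch a `ResponseError` to inspect the status code and body of a failed call (the index name and query are illustrative):

```js
const { Client, errors } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

try {
  await client.search({ index: 'my-index', query: { match_all: {} } })
} catch (err) {
  if (err instanceof errors.ResponseError) {
    // 4xx/5xx response returned by Elasticsearch
    console.log(err.statusCode, err.body)
  } else if (err instanceof errors.ConnectionError) {
    console.log('connection problem:', err.message)
  } else {
    throw err
  }
}
```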
|
||||
|
||||
|
||||
## Keep-alive connections [keep-alive]
|
||||
|
||||
By default, the client uses persistent, keep-alive connections to reduce the overhead of creating a new HTTP connection for each Elasticsearch request. If you are using the default `UndiciConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 10 minutes. If you are using the legacy `HttpConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 1 minute.
|
||||
|
||||
If you need to disable keep-alive connections, you can override the HTTP agent with your preferred [HTTP agent options](https://nodejs.org/api/http.md#http_new_agent_options):
|
||||
|
||||
```js
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// the function takes as parameter the option
|
||||
// object passed to the Connection constructor
|
||||
agent: (opts) => new CustomAgent()
|
||||
})
|
||||
```
|
||||
|
||||
Or you can disable the HTTP agent entirely:
|
||||
|
||||
```js
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
// Disable agent and keep-alive
|
||||
agent: false
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
## Closing a client’s connections [close-connections]
|
||||
|
||||
If you would like to close all open connections being managed by an instance of the client, use the `close()` function:
|
||||
|
||||
```js
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
});
|
||||
client.close();
|
||||
```
|
||||
|
||||
|
||||
## Automatic product check [product-check]
|
||||
|
||||
Since v7.14.0, the client performs a required product check before the first call. This pre-flight product check allows the client to establish the version of Elasticsearch that it is communicating with. The product check requires one additional HTTP request to be sent to the server as part of the request pipeline before the main API call is sent. In most cases, this will succeed during the very first API call that the client sends. Once the product check completes, no further product check HTTP requests are sent for subsequent API calls.
|
||||
38 docs/reference/examples.md Normal file
@ -0,0 +1,38 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/examples.html
|
||||
---
|
||||
|
||||
# Examples [examples]
|
||||
|
||||
Below you can find some examples of how to use the client.
|
||||
|
||||
* Use of the [asStream](/reference/as_stream_examples.md) parameter;
|
||||
* Executing a [bulk](/reference/bulk_examples.md) request;
|
||||
* Executing an [exists](/reference/exists_examples.md) request;
|
||||
* Executing a [get](/reference/get_examples.md) request;
|
||||
* Executing a [sql.query](/reference/sql_query_examples.md) request;
|
||||
* Executing an [update](/reference/update_examples.md) request;
|
||||
* Executing an [update by query](/reference/update_by_query_examples.md) request;
|
||||
* Executing a [reindex](/reference/reindex_examples.md) request;
|
||||
* Use of the [ignore](/reference/ignore_examples.md) parameter;
|
||||
* Executing a [msearch](/reference/msearch_examples.md) request;
|
||||
* How do I [scroll](/reference/scroll_examples.md)?
|
||||
* Executing a [search](/reference/search_examples.md) request;
|
||||
* I need [suggestions](/reference/suggest_examples.md);
|
||||
* How to use the [transport.request](/reference/transport_request_examples.md) method;
|
||||
@ -1,12 +1,18 @@
|
||||
[[exists_examples]]
|
||||
=== Exists
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/exists_examples.html
|
||||
---
|
||||
|
||||
# Exists [exists_examples]
|
||||
|
||||
Check that the document `/game-of-thrones/1` exists.
|
||||
|
||||
NOTE: Since this API uses the `HEAD` method, the body value will be boolean.
|
||||
::::{note}
|
||||
Since this API uses the `HEAD` method, the body value will be boolean.
|
||||
::::
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -34,4 +40,5 @@ async function run () {
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
```
|
||||
|
||||
@ -1,12 +1,13 @@
|
||||
[[get_examples]]
|
||||
=== Get
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/get_examples.html
|
||||
---
|
||||
|
||||
The get API allows to get a typed JSON document from the index based on its id.
|
||||
The following example gets a JSON document from an index called
|
||||
`game-of-thrones`, under a type called `_doc`, with id valued `'1'`.
|
||||
# Get [get_examples]
|
||||
|
||||
[source,js]
|
||||
----
|
||||
The get API allows you to get a typed JSON document from the index based on its ID. The following example gets a JSON document from an index called `game-of-thrones`, under a type called `_doc`, with the ID `'1'`.
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -34,4 +35,5 @@ async function run () {
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
```
|
||||
|
||||
154 docs/reference/getting-started.md Normal file
@ -0,0 +1,154 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/getting-started-js.html
|
||||
- https://www.elastic.co/guide/en/serverless/current/elasticsearch-nodejs-client-getting-started.html
|
||||
---
|
||||
|
||||
# Getting started [getting-started-js]
|
||||
|
||||
This page guides you through the installation process of the Node.js client, shows you how to instantiate the client, and how to perform basic Elasticsearch operations with it.
|
||||
|
||||
|
||||
### Requirements [_requirements]
|
||||
|
||||
* [Node.js](https://nodejs.org/) version 14.x or newer
|
||||
* [`npm`](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm), usually bundled with Node.js
|
||||
|
||||
|
||||
### Installation [_installation]
|
||||
|
||||
To install the latest version of the client, run the following command:
|
||||
|
||||
```shell
|
||||
npm install @elastic/elasticsearch
|
||||
```
|
||||
|
||||
Refer to the [*Installation*](/reference/installation.md) page to learn more.
|
||||
|
||||
|
||||
### Connecting [_connecting]
|
||||
|
||||
You can connect to Elastic Cloud using an API key and the Elasticsearch endpoint.
|
||||
|
||||
```js
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'https://...', // Elasticsearch endpoint
|
||||
auth: {
|
||||
apiKey: { // API key ID and secret
|
||||
id: 'foo',
|
||||
api_key: 'bar',
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
Your Elasticsearch endpoint can be found on the **My deployment** page of your deployment:
|
||||
|
||||
:::{image} ../images/es-endpoint.jpg
|
||||
:alt: Finding Elasticsearch endpoint
|
||||
:::
|
||||
|
||||
You can generate an API key on the **Management** page under Security.
|
||||
|
||||
:::{image} ../images/create-api-key.png
|
||||
:alt: Create API key
|
||||
:::
|
||||
|
||||
For other connection options, refer to the [*Connecting*](/reference/connecting.md) section.
|
||||
|
||||
|
||||
### Operations [_operations]
|
||||
|
||||
Time to use Elasticsearch! This section walks you through the basic, and most important, operations of Elasticsearch.
|
||||
|
||||
|
||||
#### Creating an index [_creating_an_index]
|
||||
|
||||
This is how you create the `my_index` index:
|
||||
|
||||
```js
|
||||
await client.indices.create({ index: 'my_index' })
|
||||
```
|
||||
|
||||
|
||||
#### Indexing documents [_indexing_documents]
|
||||
|
||||
This is a simple way of indexing a document:
|
||||
|
||||
```js
|
||||
await client.index({
|
||||
index: 'my_index',
|
||||
id: 'my_document_id',
|
||||
document: {
|
||||
foo: 'foo',
|
||||
bar: 'bar',
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Getting documents [_getting_documents]
|
||||
|
||||
You can get documents by using the following code:
|
||||
|
||||
```js
|
||||
await client.get({
|
||||
index: 'my_index',
|
||||
id: 'my_document_id',
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Searching documents [_searching_documents]
|
||||
|
||||
This is how you can create a single match query with the client:
|
||||
|
||||
```js
|
||||
await client.search({
|
||||
query: {
|
||||
match: {
|
||||
foo: 'foo'
|
||||
}
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Updating documents [_updating_documents]
|
||||
|
||||
This is how you can update a document, for example to add a new field:
|
||||
|
||||
```js
|
||||
await client.update({
|
||||
index: 'my_index',
|
||||
id: 'my_document_id',
|
||||
doc: {
|
||||
foo: 'bar',
|
||||
new_field: 'new value'
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Deleting documents [_deleting_documents]
|
||||
|
||||
```js
|
||||
await client.delete({
|
||||
index: 'my_index',
|
||||
id: 'my_document_id',
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
#### Deleting an index [_deleting_an_index]
|
||||
|
||||
```js
|
||||
await client.indices.delete({ index: 'my_index' })
|
||||
```
|
||||
|
||||
|
||||
## Further reading [_further_reading]
|
||||
|
||||
* Use [*Client helpers*](/reference/client-helpers.md) for a more comfortable experience with the APIs.
|
||||
* For an elaborate example of how to ingest data into Elastic Cloud, refer to [this page](docs-content://manage-data/ingest/ingesting-data-from-applications/ingest-data-with-nodejs-on-elasticsearch-service.md).
|
||||
@ -1,10 +1,13 @@
|
||||
[[ignore_examples]]
|
||||
=== Ignore
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/ignore_examples.html
|
||||
---
|
||||
|
||||
# Ignore [ignore_examples]
|
||||
|
||||
HTTP status codes which should not be considered errors for this request.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -62,4 +65,5 @@ async function run () {
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
```
|
||||
|
||||
@ -1,12 +1,15 @@
|
||||
[[introduction]]
|
||||
== Introduction
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/introduction.html
|
||||
---
|
||||
|
||||
This is the official Node.js client for {es}. This page gives a quick overview
|
||||
about the features of the client.
|
||||
# JavaScript [introduction]
|
||||
|
||||
This is the official Node.js client for {{es}}. This page gives a quick overview about the features of the client.
|
||||
|
||||
|
||||
[discrete]
|
||||
=== Features
|
||||
## Features [_features]
|
||||
|
||||
* One-to-one mapping with REST API.
|
||||
* Generalized, pluggable architecture.
|
||||
@ -17,45 +20,35 @@ about the features of the client.
|
||||
* TypeScript support out of the box.
|
||||
|
||||
|
||||
[discrete]
|
||||
==== Install multiple versions
|
||||
### Install multiple versions [_install_multiple_versions]
|
||||
|
||||
If you are using multiple versions of {es}, you need to use multiple versions of
|
||||
the client as well. In the past, installing multiple versions of the same
|
||||
package was not possible, but with `npm v6.9`, you can do it via aliasing.
|
||||
If you are using multiple versions of {{es}}, you need to use multiple versions of the client as well. In the past, installing multiple versions of the same package was not possible, but with `npm v6.9`, you can do it via aliasing.
|
||||
|
||||
To install a different version of the client, run the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
```sh
|
||||
npm install <alias>@npm:@elastic/elasticsearch@<version>
|
||||
----
|
||||
|
||||
```
|
||||
|
||||
For example, if you need to install `7.x` and `6.x`, run the following commands:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
```sh
|
||||
npm install es6@npm:@elastic/elasticsearch@6
|
||||
npm install es7@npm:@elastic/elasticsearch@7
|
||||
----
|
||||
|
||||
```
|
||||
|
||||
Your `package.json` will look similar to the following example:
|
||||
|
||||
[source,json]
|
||||
----
|
||||
```json
|
||||
"dependencies": {
|
||||
"es6": "npm:@elastic/elasticsearch@^6.7.0",
|
||||
"es7": "npm:@elastic/elasticsearch@^7.0.0"
|
||||
}
|
||||
----
|
||||
|
||||
```
|
||||
|
||||
Require the packages from your code by using the alias you have defined.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
```js
|
||||
const { Client: Client6 } = require('es6')
|
||||
const { Client: Client7 } = require('es7')
|
||||
|
||||
@ -70,15 +63,16 @@ const client7 = new Client7({
|
||||
|
||||
client6.info().then(console.log, console.log)
|
||||
client7.info().then(console.log, console.log)
|
||||
----
|
||||
```
|
||||
|
||||
Finally, if you want to install the client for the next version of {{es}} (the one that lives in the {{es}} main branch), use the following command:
|
||||
|
||||
Finally, if you want to install the client for the next version of {es} (the one
|
||||
that lives in the {es} main branch), use the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
```sh
|
||||
npm install esmain@github:elastic/elasticsearch-js
|
||||
----
|
||||
WARNING: This command installs the main branch of the client which is not
|
||||
considered stable.
|
||||
```
|
||||
|
||||
::::{warning}
|
||||
This command installs the main branch of the client which is not considered stable.
|
||||
::::
|
||||
|
||||
|
||||
65 docs/reference/installation.md Normal file
@ -0,0 +1,65 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/installation.html
|
||||
---
|
||||
|
||||
# Installation [installation]
|
||||
|
||||
This page guides you through the installation process of the client.
|
||||
|
||||
To install the latest version of the client, run the following command:
|
||||
|
||||
```sh
|
||||
npm install @elastic/elasticsearch
|
||||
```
|
||||
|
||||
To install a specific major version of the client, run the following command:
|
||||
|
||||
```sh
|
||||
npm install @elastic/elasticsearch@<major>
|
||||
```
|
||||
|
||||
To learn more about the supported major versions, please refer to the [Compatibility matrix](#js-compatibility-matrix).
|
||||
|
||||
|
||||
## Node.js support [nodejs-support]
|
||||
|
||||
::::{note}
|
||||
The minimum supported version of Node.js is `v18`.
|
||||
::::
|
||||
|
||||
|
||||
The client versioning follows the {{stack}} versioning; this means that major, minor, and patch releases are done following a precise schedule that often does not coincide with [Node.js release](https://nodejs.org/en/about/releases/) times.
|
||||
|
||||
To avoid supporting insecure and unsupported versions of Node.js, the client **will drop support for EOL versions of Node.js between minor releases**. Typically, as soon as a Node.js version goes EOL, the client will continue to support that version for at least another minor release. If you are using the client with a version of Node.js that will be unsupported soon, you will see a warning in your logs (the client starts logging the warning two minor releases in advance).
|
||||
|
||||
Unless you are **always** using a supported version of Node.js, we recommend defining the client dependency in your `package.json` with `~` instead of `^`. In this way, you lock the dependency to the minor release rather than the major (for example, `~7.10.0` instead of `^7.10.0`).
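For example, a `package.json` dependency pinned to a minor release might look like this (the version number is illustrative):

```json
"dependencies": {
  "@elastic/elasticsearch": "~8.12.0"
}
```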
|
||||
|
||||
| Node.js Version | Node.js EOL date | End of support |
|
||||
| --- | --- | --- |
|
||||
| `8.x` | December 2019 | `7.11` (early 2021) |
|
||||
| `10.x` | April 2021 | `7.12` (mid 2021) |
|
||||
| `12.x` | April 2022 | `8.2` (early 2022) |
|
||||
| `14.x` | April 2023 | `8.8` (early 2023) |
|
||||
| `16.x` | September 2023 | `8.11` (late 2023) |
|
||||
|
||||
|
||||
## Compatibility matrix [js-compatibility-matrix]
|
||||
|
||||
Language clients are forward compatible, meaning that clients support communicating with greater or equal minor versions of {{es}} without breaking. It does not mean that the client automatically supports new features of newer {{es}} versions; that is only possible after a release of a new client version. For example, an 8.12 client version won’t automatically support the new features of the 8.13 version of {{es}}; the 8.13 client version is required for that. {{es}} language clients are only backwards compatible with default distributions and without guarantees made.
|
||||
|
||||
| {{es}} Version | Client Version | Supported |
|
||||
| --- | --- | --- |
|
||||
| `8.x` | `8.x` | `8.x` |
|
||||
| `7.x` | `7.x` | `7.17` |
|
||||
| `6.x` | `6.x` | |
|
||||
| `5.x` | `5.x` | |
|
||||
|
||||
|
||||
### Browser [_browser]
|
||||
|
||||
::::{warning}
|
||||
There is no official support for the browser environment. It exposes your {{es}} instance to everyone, which could lead to security issues. We recommend that you write a lightweight proxy that uses this client instead; you can see a proxy example [here](https://github.com/elastic/elasticsearch-js/tree/master/docs/examples/proxy).
|
||||
::::
|
||||
|
||||
|
||||
16 docs/reference/integrations.md Normal file
@ -0,0 +1,16 @@
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/integrations.html
|
||||
---
|
||||
|
||||
# Integrations [integrations]
|
||||
|
||||
The Client offers the following integration options for you:
|
||||
|
||||
* [Observability](/reference/observability.md)
|
||||
* [Transport](/reference/transport.md)
|
||||
* [TypeScript support](/reference/typescript.md)
|
||||
|
||||
|
||||
|
||||
|
||||
@ -1,11 +1,13 @@
|
||||
[[msearch_examples]]
|
||||
=== MSearch
|
||||
---
|
||||
mapped_pages:
|
||||
- https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/msearch_examples.html
|
||||
---
|
||||
|
||||
The multi search API allows to execute several search requests within the same
|
||||
API.
|
||||
# MSearch [msearch_examples]
|
||||
|
||||
[source,js]
|
||||
----
|
||||
The multi search API allows you to execute several search requests within the same API call.
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -57,4 +59,5 @@ async function run () {
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
```
|
||||
|
||||
docs/reference/observability.md
@ -1,64 +1,54 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html
---

# Observability [observability]

To observe and measure Elasticsearch client usage, several client features are provided.

First, as of 8.15.0, the client provides native support for OpenTelemetry, which allows you to send client usage data to any endpoint that supports OpenTelemetry without having to make any changes to your JavaScript codebase.

Also, rather than providing a default logger, the client offers an event emitter interface to hook into internal events, such as `request` and `response`, allowing you to log the events you care about, or otherwise react to client usage however you might need.

Correlating events can be hard, especially if your applications have a large codebase with many events happening at the same time. To help you with this, the client provides a correlation ID system and other features.

All of these observability features are documented below.

## OpenTelemetry [_opentelemetry]

The client supports OpenTelemetry's [zero-code instrumentation](https://opentelemetry.io/docs/zero-code/js/) to enable tracking each client request as an [OpenTelemetry span](https://opentelemetry.io/docs/concepts/signals/traces/#spans). These spans follow all of the [semantic OpenTelemetry conventions for Elasticsearch](https://opentelemetry.io/docs/specs/semconv/database/elasticsearch/) except for `db.query.text`.

To start sending Elasticsearch trace data to your OpenTelemetry endpoint, follow [OpenTelemetry's zero-code instrumentation guide](https://opentelemetry.io/docs/zero-code/js/), or the following steps:

1. Install `@opentelemetry/api` and `@opentelemetry/auto-instrumentations-node` as Node.js dependencies.
2. Export the following environment variables with the appropriate values:

    * `OTEL_EXPORTER_OTLP_ENDPOINT`
    * `OTEL_EXPORTER_OTLP_HEADERS`
    * `OTEL_RESOURCE_ATTRIBUTES`
    * `OTEL_SERVICE_NAME`

3. `require` the Node.js auto-instrumentation library at startup:

```
node --require '@opentelemetry/auto-instrumentations-node/register' index.js
```

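Once the auto-instrumentation is loaded via `--require` and the environment variables above are set, ordinary client calls are traced without any tracing code of your own. A minimal sketch (the index name is a placeholder) could look like this:

```js
// index.js — run with:
// node --require '@opentelemetry/auto-instrumentations-node/register' index.js
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

async function run () {
  // this request is wrapped in an OpenTelemetry span by the auto-instrumentation
  // and exported to OTEL_EXPORTER_OTLP_ENDPOINT; no tracing code is needed here
  const result = await client.search({
    index: 'my-index',
    query: { match_all: {} }
  })
  console.log(result.hits.hits)
}

run().catch(console.log)
```
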
## Events [_events]

The client is an event emitter. This means that you can listen for its events to add additional logic to your code, without needing to change the client's internals or how you use the client. You can find the events' names by accessing the `events` key of the client:

```js
const { events } = require('@elastic/elasticsearch')
console.log(events)
```

The event emitter functionality can be useful if you want to log every request, response or error that is created by the client:

```js
const logger = require('my-logger')()
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
@ -73,72 +63,22 @@ client.diagnostic.on('response', (err, result) => {
    logger.info(result)
  }
})
```

The client emits the following events:

| Event | Description |
| --- | --- |
| `serialization` | Emitted before starting serialization and compression. If you want to measure this phase duration, you should measure the time elapsed between this event and `request`.<br><br>```js<br>client.diagnostic.on('serialization', (err, result) => {<br>  console.log(err, result)<br>})<br>``` |
| `request` | Emitted before sending the actual request to {{es}} *(emitted multiple times in case of retries)*.<br><br>```js<br>client.diagnostic.on('request', (err, result) => {<br>  console.log(err, result)<br>})<br>``` |
| `deserialization` | Emitted before starting deserialization and decompression. If you want to measure this phase duration, you should measure the time elapsed between this event and `response`. *(This event might not be emitted in certain situations.)*<br><br>```js<br>client.diagnostic.on('deserialization', (err, result) => {<br>  console.log(err, result)<br>})<br>``` |
| `response` | Emitted once the {{es}} response has been received and parsed.<br><br>```js<br>client.diagnostic.on('response', (err, result) => {<br>  console.log(err, result)<br>})<br>``` |
| `sniff` | Emitted when the client ends a sniffing request.<br><br>```js<br>client.diagnostic.on('sniff', (err, result) => {<br>  console.log(err, result)<br>})<br>``` |
| `resurrect` | Emitted if the client is able to resurrect a dead node.<br><br>```js<br>client.diagnostic.on('resurrect', (err, result) => {<br>  console.log(err, result)<br>})<br>``` |

The values of `result` in `serialization`, `request`, `deserialization`, `response` and `sniff` are:

```ts
body: any;
statusCode: number | null;
headers: anyObject | null;
@ -159,12 +99,11 @@ meta: {
    reason: string;
  };
};
```

While the `result` value in `resurrect` is:

```ts
strategy: string;
isAlive: boolean;
connection: Connection;
@ -172,19 +111,14 @@ name: string;
request: {
  id: any;
};
```

### Events order [_events_order]

The event order is described in the following graph; in some edge cases, the order is not guaranteed. You can see how the order changes based on the situation in [`test/acceptance/events-order.test.js`](https://github.com/elastic/elasticsearch-js/blob/main/test/acceptance/events-order.test.js).

```
serialization
│
│ (serialization and compression happens between those two events)
@ -198,17 +132,14 @@ serialization
│ (deserialization and decompression happens between those two events)
│
└─▶ response
```

## Correlation ID [_correlation_id]

Correlating events can be hard, especially if there are many events at the same time. The client offers you an automatic (and configurable) system to help you handle this problem.

```js
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
@ -233,13 +164,11 @@ client.search({
  index: 'my-index',
  query: { match_all: {} }
}).then(console.log, console.log)
```

By default the ID is an incremental integer, but you can configure it with the `generateRequestId` option:

```js
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
@ -251,30 +180,25 @@ const client = new Client({
    return 'id'
  }
})
```

You can also specify a custom ID per request:

```js
client.search({
  index: 'my-index',
  query: { match_all: {} }
}, {
  id: 'custom-id'
}).then(console.log, console.log)
```

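As a rough, self-contained sketch of a custom generator (this is not the documented example; the UUID-based generator and the `result.meta.request.id` lookup are assumptions), you could generate a UUID per request and read it back from the event metadata:

```js
const crypto = require('node:crypto')
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  // called once per API call; the returned value becomes the correlation ID
  generateRequestId (requestParams, requestOptions) {
    return crypto.randomUUID()
  }
})

client.diagnostic.on('request', (err, result) => {
  // the generated ID travels with the event metadata
  console.log('request', result?.meta?.request?.id)
})

client.diagnostic.on('response', (err, result) => {
  console.log('response', result?.meta?.request?.id)
})

client.search({
  index: 'my-index',
  query: { match_all: {} }
}).then(console.log, console.log)
```
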
## Context object [_context_object]

Sometimes, you might need to make some custom data available in your events. You can do that via the `context` option of a request:

```js
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
@ -303,14 +227,11 @@ client.search({
}, {
  context: { winter: 'is coming' }
}).then(console.log, console.log)
```

The context object can also be configured as a global option in the client configuration. If you provide both, the two context objects will be shallow merged, and the API-level object will take precedence.

```js
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
@ -340,19 +261,14 @@ client.search({
}, {
  context: { winter: 'has come' }
}).then(console.log, console.log)
```

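A small sketch of the merge behavior (the `winter` values are the doc's own placeholders; reading the merged object from `result.meta.context` is an assumption) combines a global context with a per-request one:

```js
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  // global context, shallow-merged into every request's context
  context: { winter: 'is coming', source: 'global' }
})

client.diagnostic.on('request', (err, result) => {
  // API-level keys win over global ones on conflict
  console.log(result?.meta?.context) // e.g. { winter: 'has come', source: 'global' }
})

client.search({
  index: 'my-index',
  query: { match_all: {} }
}, {
  context: { winter: 'has come' }
}).then(console.log, console.log)
```
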
## Client name [_client_name]

If you are using multiple instances of the client or multiple child clients *(which is the recommended way to have multiple instances of the client)*, you might need to recognize which client you are using. The `name` option helps you in this regard.

```js
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
@ -391,25 +307,16 @@ child.search({
  index: 'my-index',
  query: { match_all: {} }
}).then(console.log, console.log)
```

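As a minimal sketch (the name values are placeholders, and reading the name back from `client.name` and the event metadata is an assumption), a parent and a child client can each carry their own name:

```js
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  name: 'parent-client' // defaults to 'elasticsearch-js'
})

const child = client.child({
  name: 'child-client'
})

console.log(client.name, child.name) // 'parent-client' 'child-client'

client.diagnostic.on('request', (err, result) => {
  // the name of the instance that issued the request is part of the event metadata
  console.log(result?.meta?.name)
})
```
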
## X-Opaque-Id support [_x_opaque_id_support]

To improve observability, the client offers an easy way to configure the `X-Opaque-Id` header. If you set the `X-Opaque-Id` in a specific request, this allows you to discover this identifier in the [deprecation logs](docs-content://deploy-manage/monitor/logging-configuration/update-elasticsearch-logging-levels.md#deprecation-logging), helps you with [identifying search slow log origin](elasticsearch://docs/reference/elasticsearch/index-settings/slow-log.md), and helps with [identifying running tasks](https://www.elastic.co/docs/api/doc/elasticsearch/group/endpoint-tasks).

The `X-Opaque-Id` should be configured in each request; to do that, you can use the `opaqueId` option, as you can see in the following example. The resulting header will be `{ 'X-Opaque-Id': 'my-search' }`.

```js
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
@ -422,16 +329,11 @@ client.search({
}, {
  opaqueId: 'my-search'
}).then(console.log, console.log)
```

Sometimes it may be useful to prefix all the `X-Opaque-Id` headers with a specific string, in case you need to identify a specific client or server. To do this, the client offers a top-level configuration option: `opaqueIdPrefix`. In the following example, the resulting header will be `{ 'X-Opaque-Id': 'proxy-client::my-search' }`.

```js
const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
@ -445,5 +347,5 @@ client.search({
}, {
  opaqueId: 'my-search'
}).then(console.log, console.log)
```

docs/reference/reindex_examples.md
@ -1,17 +1,15 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/reindex_examples.html
---

# Reindex [reindex_examples]

The `reindex` API extracts the document source from the source index and indexes the documents into the destination index. You can copy all documents to the destination index, reindex a subset of the documents, or update the source before reindexing it.

In the following example we have a `game-of-thrones` index which contains different quotes of various characters. We want to create a new index only for house Stark and remove the `house` field from the document source.

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -76,4 +74,5 @@ async function run () {
}

run().catch(console.log)
```

docs/reference/scroll_examples.md
@ -1,28 +1,27 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/scroll_examples.html
---

# Scroll [scroll_examples]

While a search request returns a single "page" of results, the scroll API can be used to retrieve large numbers of results (or even all results) from a single search request, in much the same way as you would use a cursor on a traditional database.

Scrolling is not intended for real-time user requests, but rather for processing large amounts of data, for example in order to reindex the contents of one index into a new index with a different configuration.

::::{note}
The results that are returned from a scroll request reflect the state of the index at the time that the initial search request was made, like a snapshot in time. Subsequent changes to documents (index, update or delete) will only affect later search requests.
::::

In order to use scrolling, the initial search request should specify the scroll parameter in the query string, which tells {{es}} how long it should keep the "search context" alive.

::::{note}
Did you know that we provide a helper for sending scroll requests? You can find it [here](/reference/client-helpers.md#scroll-search-helper).
::::

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -111,13 +110,11 @@ async function run () {
}

run().catch(console.log)
```

Another cool usage of the `scroll` API can be done with Node.js ≥ 10, by using async iteration!

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -192,4 +189,5 @@ async function run () {
}

run().catch(console.log)
```

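Since the note above points to the scroll search helper, here is a minimal sketch of the helper-based approach (the index name and query are placeholders); the helper handles the scroll bookkeeping and yields one result per page:

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

async function run () {
  // the helper returns an async iterator: one iteration per page of results
  const scrollSearch = client.helpers.scrollSearch({
    index: 'my-index',
    query: { match_all: {} }
  })

  for await (const result of scrollSearch) {
    // result.documents contains the _source of each hit in the current page
    console.log(result.documents)
  }
}

run().catch(console.log)
```
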
docs/reference/search_examples.md
@ -1,14 +1,13 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/search_examples.html
---

# Search [search_examples]

The `search` API allows you to execute a search query and get back search hits that match the query. The query can either be provided using a simple [query string as a parameter](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search), or using a [request body](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-body.html).

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -61,4 +60,5 @@ async function run () {
}

run().catch(console.log)
```

docs/reference/sql_query_examples.md
@ -1,19 +1,15 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/sql_query_examples.html
---

# SQL [sql_query_examples]

{{es}} SQL is an X-Pack component that allows SQL-like queries to be executed in real time against {{es}}. Whether using the REST interface, command-line or JDBC, any client can use SQL to search and aggregate data natively inside {{es}}. One can think of {{es}} SQL as a translator, one that understands both SQL and {{es}} and makes it easy to read and process data in real time, at scale, by leveraging {{es}} capabilities.

In the following example we search all the documents whose `house` field equals `stark`, log the result with the tabular view, and then manipulate the result to obtain an object that is easy to navigate.

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -69,4 +65,5 @@ async function run () {
}

run().catch(console.log)
```

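As a rough, self-contained sketch of the flow described above (this is not the original example; the index name and field values are placeholders), `client.sql.query` returns `columns` and `rows` that can be zipped into plain objects:

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

async function run () {
  const result = await client.sql.query({
    query: "SELECT * FROM \"game-of-thrones\" WHERE house='stark'"
  })

  // tabular view: result.columns describes the schema, result.rows holds the values
  console.table(result.rows)

  // turn each row into an object keyed by column name, which is easier to navigate
  const data = result.rows.map(row => {
    const obj = {}
    for (let i = 0; i < row.length; i++) {
      obj[result.columns[i].name] = row[i]
    }
    return obj
  })
  console.log(data)
}

run().catch(console.log)
```
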
docs/reference/suggest_examples.md
@ -1,14 +1,15 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/suggest_examples.html
---

# Suggest [suggest_examples]

The suggest feature suggests similar-looking terms based on a provided text by using a suggester. *Parts of the suggest feature are still under development.*

The suggest request part is defined alongside the query part in a `search` request. If the query part is left out, only suggestions are returned.

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -63,5 +64,5 @@ async function run () {
}

run().catch(console.log)
```

docs/reference/toc.yml (new file)
@ -0,0 +1,34 @@
toc:
  - file: index.md
  - file: getting-started.md
  - file: installation.md
  - file: connecting.md
  - file: configuration.md
    children:
      - file: basic-config.md
      - file: advanced-config.md
      - file: child.md
      - file: client-testing.md
  - file: integrations.md
    children:
      - file: observability.md
      - file: transport.md
      - file: typescript.md
  - file: api-reference.md
  - file: examples.md
    children:
      - file: as_stream_examples.md
      - file: bulk_examples.md
      - file: exists_examples.md
      - file: get_examples.md
      - file: ignore_examples.md
      - file: msearch_examples.md
      - file: scroll_examples.md
      - file: search_examples.md
      - file: suggest_examples.md
      - file: transport_request_examples.md
      - file: sql_query_examples.md
      - file: update_examples.md
      - file: update_by_query_examples.md
      - file: reindex_examples.md
  - file: client-helpers.md

docs/reference/transport.md (new file)
@ -0,0 +1,53 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/transport.html
---

# Transport [transport]

This class is responsible for performing the request to {{es}} and handling errors; it also handles sniffing.

```js
const { Client } = require('@elastic/elasticsearch')
const { Transport } = require('@elastic/transport')

class MyTransport extends Transport {
  request (params, options, callback) {
    // your code
  }
}

const client = new Client({
  Transport: MyTransport
})
```

Sometimes you need to inject a small snippet of your code and then continue to use the usual client code. In such cases, call `super.method`:

```js
class MyTransport extends Transport {
  request (params, options, callback) {
    // your code
    return super.request(params, options, callback)
  }
}
```

## Supported content types [_supported_content_types]

Depending on the `content-type` of the response, the transport will return the body as different types:

| Content-Type | JavaScript type |
| --- | --- |
| `application/json` | `object` |
| `text/plain` | `string` |
| `application/vnd.elasticsearch+json` | `object` |
| `application/vnd.mapbox-vector-tile` | `Buffer` |
| `application/vnd.apache.arrow.stream` | `Buffer` |
| `application/vnd.elasticsearch+arrow+stream` | `Buffer` |
| `application/smile` | `Buffer` |
| `application/vnd.elasticsearch+smile` | `Buffer` |
| `application/cbor` | `Buffer` |
| `application/vnd.elasticsearch+cbor` | `Buffer` |

docs/reference/transport_request_examples.md
@ -1,22 +1,23 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/transport_request_examples.html
---

# transport.request [transport_request_examples]

It can happen that you need to communicate with {{es}} by using an API that is not supported by the client. To mitigate this issue, you can directly call `client.transport.request`, which is the internal utility that the client uses to communicate with {{es}} when you use an API method.

::::{note}
When using the `transport.request` method you must provide all the parameters needed to perform an HTTP call, such as `method`, `path`, `querystring`, and `body`.
::::

::::{tip}
If you find yourself using this method too often, consider using `client.extend`, which will make your code look cleaner and easier to maintain.
::::

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -71,4 +72,5 @@ async function run () {
}

run().catch(console.log)
```

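As a minimal, self-contained sketch of a direct call (the index name, query string, and body are placeholders, not the original example), every part of the HTTP request mentioned in the note is spelled out explicitly:

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

async function run () {
  // method, path, querystring, and body are all provided by hand
  const result = await client.transport.request({
    method: 'POST',
    path: '/my-index/_search',
    querystring: { size: 1 },
    body: {
      query: { match_all: {} }
    }
  })
  console.log(result)
}

run().catch(console.log)
```
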
docs/reference/typescript.md
@ -1,22 +1,23 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/typescript.html
---

# TypeScript support [typescript]

The client offers first-class support for TypeScript, shipping a complete set of type definitions of Elasticsearch's API surface.

The types are not 100% complete yet. Some APIs are missing (the newest ones, e.g. EQL), and others may contain some errors, but we are continuously pushing fixes and improvements. Contribute type fixes and improvements to the [elasticsearch-specification GitHub repository](https://github.com/elastic/elasticsearch-specification).

::::{note}
The client is developed against the [latest](https://www.npmjs.com/package/typescript?activeTab=versions) version of TypeScript. Furthermore, unless you have set `skipLibCheck` to `true`, you should configure `esModuleInterop` to `true`.
::::

## Example [_example]

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({
@ -71,21 +72,20 @@ async function run () {
}

run().catch(console.log)
```

## Request & Response types [_request_response_types]

You can import the full TypeScript requests and responses definitions as follows:

```ts
import { estypes } from '@elastic/elasticsearch'
```

If you need the legacy definitions with the body, you can do the following:

```ts
import { estypesWithBody } from '@elastic/elasticsearch'
```

docs/reference/update_by_query_examples.md
@ -1,12 +1,13 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/update_by_query_examples.html
---

# Update By Query [update_by_query_examples]

The simplest usage of `_update_by_query` just performs an update on every document in the index without changing the source. This is useful to pick up a new property or some other online mapping change.

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -56,5 +57,5 @@ async function run () {
}

run().catch(console.log)
```

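As a rough sketch of the no-op form described above (the index name is a placeholder, not the original example), calling the API without a script or query touches every document so it picks up the new property or mapping change:

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' }
})

async function run () {
  // no script and no query: every document is "updated" in place,
  // which is enough for it to pick up an online mapping change
  const result = await client.updateByQuery({
    index: 'my-index',
    conflicts: 'proceed'
  })
  console.log(result.updated)
}

run().catch(console.log)
```
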
docs/reference/update_examples.md
@ -1,12 +1,13 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/update_examples.html
---

# Update [update_examples]

The update API allows updates of a specific document using the given script. In the following example, we will index a document that also tracks how many times a character has said the given quote, and then we will update the `times` field.

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -47,13 +48,11 @@ async function run () {
}

run().catch(console.log)
```

With the update API, you can also run a partial update of a document.

```js
'use strict'

const { Client } = require('@elastic/elasticsearch')
@ -90,6 +89,5 @@ async function run () {
}

run().catch(console.log)
```

28
docs/release-notes/breaking-changes.md
Normal file
28
docs/release-notes/breaking-changes.md
Normal file
@ -0,0 +1,28 @@
|
||||
---
|
||||
navigation_title: "Elasticsearch JavaScript Client"
|
||||
---
|
||||
|
||||
# Elasticsearch JavaScript Client breaking changes [elasticsearch-javascript-client-breaking-changes]
|
||||
Before you upgrade, carefully review the Elasticsearch JavaScript Client breaking changes and take the necessary steps to mitigate any issues.
|
||||
|
||||
To learn how to upgrade, check out <upgrade docs>.
|
||||
|
||||
% ## Next version [elasticsearch-javascript-client-versionnext-breaking-changes]
|
||||
% **Release date:** Month day, year
|
||||
|
||||
% ::::{dropdown} Title of breaking change
|
||||
% Description of the breaking change.
|
||||
% For more information, check [PR #](PR link).
|
||||
% **Impact**<br> Impact of the breaking change.
|
||||
% **Action**<br> Steps for mitigating deprecation impact.
|
||||
% ::::
|
||||
|
||||
% ## 9.0.0 [elasticsearch-javascript-client-900-breaking-changes]
|
||||
% **Release date:** March 25, 2025
|
||||
|
||||
% ::::{dropdown} Title of breaking change
|
||||
% Description of the breaking change.
|
||||
% For more information, check [PR #](PR link).
|
||||
% **Impact**<br> Impact of the breaking change.
|
||||
% **Action**<br> Steps for mitigating deprecation impact.
|
||||
% ::::
|
||||
28
docs/release-notes/deprecations.md
Normal file
28
docs/release-notes/deprecations.md
Normal file
@ -0,0 +1,28 @@
|
||||
---
|
||||
navigation_title: "Elasticsearch JavaScript Client"
|
||||
---
|
||||
|
||||
# Elasticsearch JavaScript Client deprecations [elasticsearch-javascript-client-deprecations]
|
||||
Review the deprecated functionality for your Elasticsearch JavaScript Client version. While deprecations have no immediate impact, we strongly encourage you to update your implementation after you upgrade.
|
||||
|
||||
To learn how to upgrade, check out <upgrade docs>.
|
||||
|
||||
% ## Next version
|
||||
% **Release date:** Month day, year
|
||||
|
||||
% ::::{dropdown} Deprecation title
|
||||
% Description of the deprecation.
|
||||
% For more information, check [PR #](PR link).
|
||||
% **Impact**<br> Impact of deprecation.
|
||||
% **Action**<br> Steps for mitigating deprecation impact.
|
||||
% ::::
|
||||
|
||||
% ## 9.0.0 [elasticsearch-javascript-client-900-deprecations]
|
||||
% **Release date:** March 25, 2025
|
||||
|
||||
% ::::{dropdown} Deprecation title
|
||||
% Description of the deprecation.
|
||||
% For more information, check [PR #](PR link).
|
||||
% **Impact**<br> Impact of deprecation.
|
||||
% **Action**<br> Steps for mitigating deprecation impact.
|
||||
% ::::
|
||||
27
docs/release-notes/index.md
Normal file
27
docs/release-notes/index.md
Normal file
@ -0,0 +1,27 @@
|
||||
---
|
||||
navigation_title: "Elasticsearch JavaScript Client"
|
||||
---
|
||||
|
||||
# Elasticsearch JavaScript Client release notes [elasticsearch-javascript-client-release-notes]
|
||||
|
||||
Review the changes, fixes, and more in each version of Elasticsearch JavaScript Client.
|
||||
|
||||
To check for security updates, go to [Security announcements for the Elastic stack](https://discuss.elastic.co/c/announcements/security-announcements/31).
|
||||
|
||||
% Release notes include only features, enhancements, and fixes. Add breaking changes, deprecations, and known issues to the applicable release notes sections.
|
||||
|
||||
% ## version.next [elasticsearch-javascript-client-next-release-notes]
|
||||
% **Release date:** Month day, year
|
||||
|
||||
% ### Features and enhancements [elasticsearch-javascript-client-next-features-enhancements]
|
||||
% *
|
||||
|
||||
% ### Fixes [elasticsearch-javascript-client-next-fixes]
|
||||
% *
|
||||
|
||||
## 9.0.0 [elasticsearch-javascript-client-900-release-notes]
|
||||
**Release date:** March 25, 2025
|
||||
|
||||
### Features and enhancements [elasticsearch-javascript-client-900-features-enhancements]
|
||||
|
||||
### Fixes [elasticsearch-javascript-client-900-fixes]
|
||||
20
docs/release-notes/known-issues.md
Normal file
20
docs/release-notes/known-issues.md
Normal file
@ -0,0 +1,20 @@
|
||||
---
|
||||
navigation_title: "Elasticsearch JavaScript Client"
|
||||
|
||||
---
|
||||
|
||||
# Elasticsearch JavaScript Client known issues [elasticsearch-javascript-client-known-issues]
|
||||
|
||||
% Use the following template to add entries to this page.
|
||||
|
||||
% :::{dropdown} Title of known issue
|
||||
% **Details**
|
||||
% On [Month/Day/Year], a known issue was discovered that [description of known issue].
|
||||
|
||||
% **Workaround**
|
||||
% Workaround description.
|
||||
|
||||
% **Resolved**
|
||||
% On [Month/Day/Year], this issue was resolved.
|
||||
|
||||
% :::
|
||||
5
docs/release-notes/toc.yml
Normal file
5
docs/release-notes/toc.yml
Normal file
@ -0,0 +1,5 @@
|
||||
toc:
|
||||
- file: index.md
|
||||
- file: known-issues.md
|
||||
- file: breaking-changes.md
|
||||
- file: deprecations.md
|
||||
@ -1,158 +0,0 @@
|
||||
[[client-testing]]
|
||||
=== Testing
|
||||
|
||||
Testing is one of the most important parts of developing an application.
|
||||
The client is very flexible when it comes to testing and is compatible with
|
||||
most testing frameworks (such as https://www.npmjs.com/package/ava[`ava`],
|
||||
which is used in the examples below).
|
||||
|
||||
If you are using this client, you are most likely working with {es}, and one of
|
||||
the first issues you face is how to test your application. A perfectly valid
|
||||
solution is to use the real {es} instance for testing your application, but you
|
||||
would be doing an integration test, while you want a unit test. There are many
|
||||
ways to solve this problem, you could create the database with Docker, or use an
|
||||
in-memory compatible one, but if you are writing unit tests that can be easily
|
||||
parallelized this becomes quite uncomfortable. A different way of improving your
|
||||
testing experience while doing unit tests is to use a mock.
|
||||
|
||||
The client is designed to be easy to extend and adapt to your needs. Thanks to
|
||||
its internal architecture it allows you to change some specific components while
|
||||
keeping the rest of it working as usual. Each {es} official client is composed
|
||||
of the following components:
|
||||
|
||||
* `API layer`: every {es} API that you can call.
|
||||
* `Transport`: a component that takes care of preparing a request before sending
|
||||
it and handling all the retry and sniffing strategies.
|
||||
* `ConnectionPool`: {es} is a cluster and might have multiple nodes, the
|
||||
`ConnectionPool` takes care of them.
|
||||
* `Serializer`: A class with all the serialization strategies, from the basic
|
||||
JSON to the new line delimited JSON.
|
||||
* `Connection`: The actual HTTP library.
|
||||
|
||||
The best way to mock {es} with the official clients is to replace the
|
||||
`Connection` component since it has very few responsibilities and it does not
|
||||
interact with other internal components other than getting requests and
|
||||
returning responses.
|
||||
|
||||
|
||||
[discrete]
|
||||
==== `@elastic/elasticsearch-mock`
|
||||
|
||||
Writing a mock for every test by hand can be annoying and error-prone, so we
|
||||
have built a simple yet powerful mocking library specifically designed for this
|
||||
client, and you can install it with the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install @elastic/elasticsearch-mock --save-dev
|
||||
----
|
||||
|
||||
With this library you can create custom mocks for any request you can send to
|
||||
{es}. It offers a simple and intuitive API and it mocks only the HTTP layer,
|
||||
leaving the rest of the client working as usual.
|
||||
|
||||
Before showing all of its features, and what you can do with it, let’s see an
|
||||
example:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const Mock = require('@elastic/elasticsearch-mock')
|
||||
|
||||
const mock = new Mock()
|
||||
const client = new Client({
|
||||
cloud: { id: '<cloud-id>' },
|
||||
auth: { apiKey: 'base64EncodedKey' },
|
||||
Connection: mock.getConnection()
|
||||
})
|
||||
|
||||
mock.add({
|
||||
method: 'GET',
|
||||
path: '/'
|
||||
}, () => {
|
||||
return { status: 'ok' }
|
||||
})
|
||||
|
||||
client.info().then(console.log, console.log)
|
||||
----
|
||||
|
||||
As you can see, it works closely with the client itself. Once you have created a
new instance of the mock library, you just need to call the mock.getConnection()
method and pass its result to the Connection option of the client. From now on,
every request is handled by the mock library, and the HTTP layer will never be
touched. As a result, your tests are significantly faster and you are able to
parallelize them easily!
|
||||
|
||||
The library allows you to write both “strict” and “loose” mocks, which means
|
||||
that you can write a mock that handles a very specific request or be looser and
|
||||
handle a group of requests. Let's see this in action:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
mock.add({
|
||||
method: 'POST',
|
||||
path: '/indexName/_search'
|
||||
}, () => {
|
||||
return {
|
||||
hits: {
|
||||
total: { value: 1, relation: 'eq' },
|
||||
hits: [{ _source: { baz: 'faz' } }]
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
mock.add({
|
||||
method: 'POST',
|
||||
path: '/indexName/_search',
|
||||
body: { query: { match: { foo: 'bar' } } }
|
||||
}, () => {
|
||||
return {
|
||||
hits: {
|
||||
total: { value: 0, relation: 'eq' },
|
||||
hits: []
|
||||
}
|
||||
}
|
||||
})
|
||||
----
|
||||
|
||||
In the example above, every search request gets the first response, while every
|
||||
search request that uses the query described in the second mock gets the second
|
||||
response.
|
||||
|
||||
You can also specify dynamic paths:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
mock.add({
|
||||
method: 'GET',
|
||||
path: '/:index/_count'
|
||||
}, () => {
|
||||
return { count: 42 }
|
||||
})
|
||||
|
||||
client.count({ index: 'foo' }).then(console.log, console.log) // => { count: 42 }
|
||||
client.count({ index: 'bar' }).then(console.log, console.log) // => { count: 42 }
|
||||
----
|
||||
|
||||
And wildcards are supported as well.
|
||||
|
||||
Another very interesting use case is the ability to create a test that randomly
|
||||
fails to see how your code reacts to failures:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
mock.add({
  method: 'GET',
  path: '/:index/_count'
}, () => {
  if (Math.random() > 0.8) {
    // `errors` comes from `const { errors } = require('@elastic/elasticsearch')`
    return new errors.ResponseError({ body: {}, statusCode: 500 })
  } else {
    return { count: 42 }
  }
})
|
||||
----
|
||||
|
||||
We have seen how simple it is to mock {es} and test your application; you can
find many more features and examples in the
https://github.com/elastic/elasticsearch-js-mock[module documentation].
|
||||
@ -1,8 +0,0 @@
|
||||
[[timeout-best-practices]]
|
||||
=== Timeout best practices
|
||||
|
||||
Starting in 9.0.0, this client is configured to not time out any HTTP request by default. {es} will always eventually respond to any request, even if it takes several minutes. Reissuing a request that it has not responded to yet can cause performance side effects. See the {ref}/modules-network.html#_http_client_configuration[official {es} recommendations for HTTP clients] for more information.
|
||||
|
||||
Prior to 9.0, this client was configured by default to operate like many HTTP client libraries do, by using a relatively short (30 second) timeout on all requests sent to {es}, raising a `TimeoutError` when that time period elapsed without receiving a response.
|
||||
|
||||
If your circumstances require you to set timeouts on Elasticsearch requests, setting `requestTimeout` to a number of milliseconds will cause this client to operate as it did prior to 9.0.
|
||||
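A minimal sketch of opting back into a timeout (the 30-second value here simply mirrors the pre-9.0 default and is not a recommendation; the per-request override is an assumption based on the transport request options):

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  cloud: { id: '<cloud-id>' },
  auth: { apiKey: 'base64EncodedKey' },
  requestTimeout: 30000 // milliseconds; restores the pre-9.0 behavior
})

// a single request can also override the client-level setting
client.search(
  { index: 'my-index', query: { match_all: {} } },
  { requestTimeout: 5000 }
).then(console.log, console.log)
----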
@ -1,73 +0,0 @@
|
||||
[[transport]]
|
||||
=== Transport
|
||||
|
||||
This class is responsible for performing the request to {es} and handling
errors; it also handles sniffing.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const { Transport } = require('@elastic/transport')
|
||||
|
||||
class MyTransport extends Transport {
|
||||
request (params, options, callback) {
|
||||
// your code
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
Transport: MyTransport
|
||||
})
|
||||
----
|
||||
|
||||
Sometimes you need to inject a small snippet of your code and then continue to
|
||||
use the usual client code. In such cases, call `super.method`:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
class MyTransport extends Transport {
|
||||
request (params, options, callback) {
|
||||
// your code
|
||||
return super.request(params, options, callback)
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
==== Supported content types
|
||||
|
||||
Depending on the `content-type` of the response, the transport will return the body as different types:
|
||||
|
||||
[cols="1,1"]
|
||||
|===
|
||||
|Content-Type |JavaScript type
|
||||
|
||||
|`application/json`
|
||||
|`object`
|
||||
|
||||
|`text/plain`
|
||||
|`string`
|
||||
|
||||
|`application/vnd.elasticsearch+json`
|
||||
|`object`
|
||||
|
||||
|`application/vnd.mapbox-vector-tile`
|
||||
|`Buffer`
|
||||
|
||||
|`application/vnd.apache.arrow.stream`
|
||||
|`Buffer`
|
||||
|
||||
|`application/vnd.elasticsearch+arrow+stream`
|
||||
|`Buffer`
|
||||
|
||||
|`application/smile`
|
||||
|`Buffer`
|
||||
|
||||
|`application/vnd.elasticsearch+smile`
|
||||
|`Buffer`
|
||||
|
||||
|`application/cbor`
|
||||
|`Buffer`
|
||||
|
||||
|`application/vnd.elasticsearch+cbor`
|
||||
|`Buffer`
|
||||
|===
|
||||
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@elastic/elasticsearch",
|
||||
"version": "9.0.0-alpha.4",
|
||||
"version": "9.0.0-alpha.3",
|
||||
"versionCanary": "9.0.0-canary.0",
|
||||
"description": "The official Elasticsearch client for Node.js",
|
||||
"main": "./index.js",
|
||||
|
||||
@ -35,133 +35,12 @@ import {
|
||||
TransportResult
|
||||
} from '@elastic/transport'
|
||||
import * as T from '../types'
|
||||
|
||||
interface That {
|
||||
transport: Transport
|
||||
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
|
||||
}
|
||||
|
||||
const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
|
||||
interface That { transport: Transport }
|
||||
|
||||
export default class AsyncSearch {
|
||||
transport: Transport
|
||||
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
|
||||
constructor (transport: Transport) {
|
||||
this.transport = transport
|
||||
this.acceptedParams = {
|
||||
'async_search.delete': {
|
||||
path: [
|
||||
'id'
|
||||
],
|
||||
body: [],
|
||||
query: []
|
||||
},
|
||||
'async_search.get': {
|
||||
path: [
|
||||
'id'
|
||||
],
|
||||
body: [],
|
||||
query: [
|
||||
'keep_alive',
|
||||
'typed_keys',
|
||||
'wait_for_completion_timeout'
|
||||
]
|
||||
},
|
||||
'async_search.status': {
|
||||
path: [
|
||||
'id'
|
||||
],
|
||||
body: [],
|
||||
query: [
|
||||
'keep_alive'
|
||||
]
|
||||
},
|
||||
'async_search.submit': {
|
||||
path: [
|
||||
'index'
|
||||
],
|
||||
body: [
|
||||
'aggregations',
|
||||
'aggs',
|
||||
'collapse',
|
||||
'explain',
|
||||
'ext',
|
||||
'from',
|
||||
'highlight',
|
||||
'track_total_hits',
|
||||
'indices_boost',
|
||||
'docvalue_fields',
|
||||
'knn',
|
||||
'min_score',
|
||||
'post_filter',
|
||||
'profile',
|
||||
'query',
|
||||
'rescore',
|
||||
'script_fields',
|
||||
'search_after',
|
||||
'size',
|
||||
'slice',
|
||||
'sort',
|
||||
'_source',
|
||||
'fields',
|
||||
'suggest',
|
||||
'terminate_after',
|
||||
'timeout',
|
||||
'track_scores',
|
||||
'version',
|
||||
'seq_no_primary_term',
|
||||
'stored_fields',
|
||||
'pit',
|
||||
'runtime_mappings',
|
||||
'stats'
|
||||
],
|
||||
query: [
|
||||
'wait_for_completion_timeout',
|
||||
'keep_alive',
|
||||
'keep_on_completion',
|
||||
'allow_no_indices',
|
||||
'allow_partial_search_results',
|
||||
'analyzer',
|
||||
'analyze_wildcard',
|
||||
'batched_reduce_size',
|
||||
'ccs_minimize_roundtrips',
|
||||
'default_operator',
|
||||
'df',
|
||||
'docvalue_fields',
|
||||
'expand_wildcards',
|
||||
'explain',
|
||||
'ignore_throttled',
|
||||
'ignore_unavailable',
|
||||
'lenient',
|
||||
'max_concurrent_shard_requests',
|
||||
'preference',
|
||||
          'request_cache', 'routing', 'search_type', 'stats', 'stored_fields',
          'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text',
          'terminate_after', 'timeout', 'track_total_hits', 'track_scores',
          'typed_keys', 'rest_total_hits_as_int', 'version', '_source',
          '_source_excludes', '_source_includes', 'seq_no_primary_term',
          'q', 'size', 'from', 'sort'
        ]
      }
    }
  }

  /**
@@ -172,10 +51,7 @@ export default class AsyncSearch {
  async delete (this: That, params: T.AsyncSearchDeleteRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchDeleteResponse, unknown>>
  async delete (this: That, params: T.AsyncSearchDeleteRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchDeleteResponse>
  async delete (this: That, params: T.AsyncSearchDeleteRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['async_search.delete']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -217,10 +93,7 @@ export default class AsyncSearch {
  async get<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params: T.AsyncSearchGetRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchGetResponse<TDocument, TAggregations>, unknown>>
  async get<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params: T.AsyncSearchGetRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchGetResponse<TDocument, TAggregations>>
  async get<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params: T.AsyncSearchGetRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['async_search.get']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -262,10 +135,7 @@ export default class AsyncSearch {
  async status (this: That, params: T.AsyncSearchStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchStatusResponse, unknown>>
  async status (this: That, params: T.AsyncSearchStatusRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchStatusResponse>
  async status (this: That, params: T.AsyncSearchStatusRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['async_search.status']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -307,12 +177,8 @@ export default class AsyncSearch {
  async submit<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params?: T.AsyncSearchSubmitRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AsyncSearchSubmitResponse<TDocument, TAggregations>, unknown>>
  async submit<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params?: T.AsyncSearchSubmitRequest, options?: TransportRequestOptions): Promise<T.AsyncSearchSubmitResponse<TDocument, TAggregations>>
  async submit<TDocument = unknown, TAggregations = Record<T.AggregateName, T.AggregationsAggregate>> (this: That, params?: T.AsyncSearchSubmitRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['async_search.submit']
    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'knn', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -339,14 +205,8 @@ export default class AsyncSearch {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }
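The hunks above all follow the same request-building pattern: each generated method looks up its accepted path, body, and query parameter names, then walks the caller's params object and routes each key into either the query string or the request body. The sketch below illustrates that routing rule in isolation; it is not the client's actual code, and the helper name `splitParams` is made up for illustration.

[source,ts]
----
// Illustrative sketch of the routing shown in the diff above (hypothetical helper).
// Keys listed in acceptedQuery or in the common query params become query-string
// parameters; any other unrecognized key is folded into the request body.
const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

function splitParams (
  params: Record<string, any>,
  acceptedPath: string[],
  acceptedBody: string[],
  acceptedQuery: string[]
): { body: Record<string, any>, querystring: Record<string, any> } {
  const body: Record<string, any> = {}
  const querystring: Record<string, any> = { ...(params.querystring ?? {}) }
  for (const key of Object.keys(params)) {
    if (acceptedBody.includes(key)) {
      body[key] = params[key]
    } else if (acceptedPath.includes(key) || key === 'body' || key === 'querystring') {
      continue // path parts and explicit body/querystring are handled separately
    } else if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
      querystring[key] = params[key]
    } else {
      body[key] = params[key]
    }
  }
  return { body, querystring }
}

// Example: for async_search.submit, `size` is an accepted body key and `pretty`
// is a common query param, so they land in the body and query string respectively.
const { body, querystring } = splitParams(
  { index: 'my-index', size: 10, pretty: true },
  ['index'],
  ['size', 'query'],
  ['wait_for_completion_timeout']
)
----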
@@ -35,59 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Autoscaling {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'autoscaling.delete_autoscaling_policy': { path: ['name'], body: [], query: ['master_timeout', 'timeout'] },
      'autoscaling.get_autoscaling_capacity': { path: [], body: [], query: ['master_timeout'] },
      'autoscaling.get_autoscaling_policy': { path: ['name'], body: [], query: ['master_timeout'] },
      'autoscaling.put_autoscaling_policy': { path: ['name'], body: ['policy'], query: ['master_timeout', 'timeout'] }
    }
  }

  /**
@@ -98,10 +51,7 @@ export default class Autoscaling {
  async deleteAutoscalingPolicy (this: That, params: T.AutoscalingDeleteAutoscalingPolicyRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AutoscalingDeleteAutoscalingPolicyResponse, unknown>>
  async deleteAutoscalingPolicy (this: That, params: T.AutoscalingDeleteAutoscalingPolicyRequest, options?: TransportRequestOptions): Promise<T.AutoscalingDeleteAutoscalingPolicyResponse>
  async deleteAutoscalingPolicy (this: That, params: T.AutoscalingDeleteAutoscalingPolicyRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['autoscaling.delete_autoscaling_policy']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -143,10 +93,7 @@ export default class Autoscaling {
  async getAutoscalingCapacity (this: That, params?: T.AutoscalingGetAutoscalingCapacityRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AutoscalingGetAutoscalingCapacityResponse, unknown>>
  async getAutoscalingCapacity (this: That, params?: T.AutoscalingGetAutoscalingCapacityRequest, options?: TransportRequestOptions): Promise<T.AutoscalingGetAutoscalingCapacityResponse>
  async getAutoscalingCapacity (this: That, params?: T.AutoscalingGetAutoscalingCapacityRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['autoscaling.get_autoscaling_capacity']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -186,10 +133,7 @@ export default class Autoscaling {
  async getAutoscalingPolicy (this: That, params: T.AutoscalingGetAutoscalingPolicyRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AutoscalingGetAutoscalingPolicyResponse, unknown>>
  async getAutoscalingPolicy (this: That, params: T.AutoscalingGetAutoscalingPolicyRequest, options?: TransportRequestOptions): Promise<T.AutoscalingGetAutoscalingPolicyResponse>
  async getAutoscalingPolicy (this: That, params: T.AutoscalingGetAutoscalingPolicyRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['autoscaling.get_autoscaling_policy']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -231,12 +175,8 @@ export default class Autoscaling {
  async putAutoscalingPolicy (this: That, params: T.AutoscalingPutAutoscalingPolicyRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.AutoscalingPutAutoscalingPolicyResponse, unknown>>
  async putAutoscalingPolicy (this: That, params: T.AutoscalingPutAutoscalingPolicyRequest, options?: TransportRequestOptions): Promise<T.AutoscalingPutAutoscalingPolicyResponse>
  async putAutoscalingPolicy (this: That, params: T.AutoscalingPutAutoscalingPolicyRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['autoscaling.put_autoscaling_policy']
    const acceptedPath: string[] = ['name']
    const acceptedBody: string[] = ['policy']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -248,14 +188,8 @@ export default class Autoscaling {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }
File diff suppressed because one or more lines are too long
@@ -35,18 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  capabilities: {
    path: [],
    body: [],
    query: []
  }
}
interface That { transport: Transport }

/**
 * Checks if the specified combination of method, API, parameters, and arbitrary capabilities are supported
@@ -56,10 +45,7 @@ export default async function CapabilitiesApi (this: That, params?: T.TODO, opti
export default async function CapabilitiesApi (this: That, params?: T.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
export default async function CapabilitiesApi (this: That, params?: T.TODO, options?: TransportRequestOptions): Promise<T.TODO>
export default async function CapabilitiesApi (this: That, params?: T.TODO, options?: TransportRequestOptions): Promise<any> {
  const { path: acceptedPath } = acceptedParams.capabilities
  const acceptedPath: string[] = []
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -35,336 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}
interface That { transport: Transport }

export default class Cat {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'cat.aliases': { path: ['name'], body: [], query: ['h', 's', 'expand_wildcards', 'master_timeout'] },
      'cat.allocation': { path: ['node_id'], body: [], query: ['bytes', 'h', 's', 'local', 'master_timeout'] },
      'cat.component_templates': { path: ['name'], body: [], query: ['h', 's', 'local', 'master_timeout'] },
      'cat.count': { path: ['index'], body: [], query: ['h', 's'] },
      'cat.fielddata': { path: ['fields'], body: [], query: ['bytes', 'fields', 'h', 's'] },
      'cat.health': { path: [], body: [], query: ['time', 'ts', 'h', 's'] },
      'cat.help': { path: [], body: [], query: [] },
      'cat.indices': { path: ['index'], body: [], query: ['bytes', 'expand_wildcards', 'health', 'include_unloaded_segments', 'pri', 'time', 'master_timeout', 'h', 's'] },
      'cat.master': { path: [], body: [], query: ['h', 's', 'local', 'master_timeout'] },
      'cat.ml_data_frame_analytics': { path: ['id'], body: [], query: ['allow_no_match', 'bytes', 'h', 's', 'time'] },
      'cat.ml_datafeeds': { path: ['datafeed_id'], body: [], query: ['allow_no_match', 'h', 's', 'time'] },
      'cat.ml_jobs': { path: ['job_id'], body: [], query: ['allow_no_match', 'bytes', 'h', 's', 'time'] },
      'cat.ml_trained_models': { path: ['model_id'], body: [], query: ['allow_no_match', 'bytes', 'h', 's', 'from', 'size', 'time'] },
      'cat.nodeattrs': { path: [], body: [], query: ['h', 's', 'local', 'master_timeout'] },
      'cat.nodes': { path: [], body: [], query: ['bytes', 'full_id', 'include_unloaded_segments', 'h', 's', 'master_timeout', 'time'] },
      'cat.pending_tasks': { path: [], body: [], query: ['h', 's', 'local', 'master_timeout', 'time'] },
      'cat.plugins': { path: [], body: [], query: ['h', 's', 'include_bootstrap', 'local', 'master_timeout'] },
      'cat.recovery': { path: ['index'], body: [], query: ['active_only', 'bytes', 'detailed', 'index', 'h', 's', 'time'] },
      'cat.repositories': { path: [], body: [], query: ['h', 's', 'local', 'master_timeout'] },
      'cat.segments': { path: ['index'], body: [], query: ['bytes', 'h', 's', 'local', 'master_timeout'] },
      'cat.shards': { path: ['index'], body: [], query: ['bytes', 'h', 's', 'master_timeout', 'time'] },
      'cat.snapshots': { path: ['repository'], body: [], query: ['ignore_unavailable', 'h', 's', 'master_timeout', 'time'] },
      'cat.tasks': { path: [], body: [], query: ['actions', 'detailed', 'nodes', 'parent_task_id', 'h', 's', 'time', 'timeout', 'wait_for_completion'] },
      'cat.templates': { path: ['name'], body: [], query: ['h', 's', 'local', 'master_timeout'] },
      'cat.thread_pool': { path: ['thread_pool_patterns'], body: [], query: ['h', 's', 'time', 'local', 'master_timeout'] },
      'cat.transforms': { path: ['transform_id'], body: [], query: ['allow_no_match', 'from', 'h', 's', 'time', 'size'] }
    }
  }

  /**
@@ -375,10 +51,7 @@ export default class Cat {
  async aliases (this: That, params?: T.CatAliasesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatAliasesResponse, unknown>>
  async aliases (this: That, params?: T.CatAliasesRequest, options?: TransportRequestOptions): Promise<T.CatAliasesResponse>
  async aliases (this: That, params?: T.CatAliasesRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.aliases']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -428,10 +101,7 @@ export default class Cat {
  async allocation (this: That, params?: T.CatAllocationRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatAllocationResponse, unknown>>
  async allocation (this: That, params?: T.CatAllocationRequest, options?: TransportRequestOptions): Promise<T.CatAllocationResponse>
  async allocation (this: That, params?: T.CatAllocationRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.allocation']
    const acceptedPath: string[] = ['node_id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -481,10 +151,7 @@ export default class Cat {
  async componentTemplates (this: That, params?: T.CatComponentTemplatesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatComponentTemplatesResponse, unknown>>
  async componentTemplates (this: That, params?: T.CatComponentTemplatesRequest, options?: TransportRequestOptions): Promise<T.CatComponentTemplatesResponse>
  async componentTemplates (this: That, params?: T.CatComponentTemplatesRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.component_templates']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -534,10 +201,7 @@ export default class Cat {
  async count (this: That, params?: T.CatCountRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatCountResponse, unknown>>
  async count (this: That, params?: T.CatCountRequest, options?: TransportRequestOptions): Promise<T.CatCountResponse>
  async count (this: That, params?: T.CatCountRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.count']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -587,10 +251,7 @@ export default class Cat {
  async fielddata (this: That, params?: T.CatFielddataRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatFielddataResponse, unknown>>
  async fielddata (this: That, params?: T.CatFielddataRequest, options?: TransportRequestOptions): Promise<T.CatFielddataResponse>
  async fielddata (this: That, params?: T.CatFielddataRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.fielddata']
    const acceptedPath: string[] = ['fields']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -640,10 +301,7 @@ export default class Cat {
  async health (this: That, params?: T.CatHealthRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatHealthResponse, unknown>>
  async health (this: That, params?: T.CatHealthRequest, options?: TransportRequestOptions): Promise<T.CatHealthResponse>
  async health (this: That, params?: T.CatHealthRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.health']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -683,10 +341,7 @@ export default class Cat {
  async help (this: That, params?: T.CatHelpRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatHelpResponse, unknown>>
  async help (this: That, params?: T.CatHelpRequest, options?: TransportRequestOptions): Promise<T.CatHelpResponse>
  async help (this: That, params?: T.CatHelpRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.help']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -726,10 +381,7 @@ export default class Cat {
  async indices (this: That, params?: T.CatIndicesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatIndicesResponse, unknown>>
  async indices (this: That, params?: T.CatIndicesRequest, options?: TransportRequestOptions): Promise<T.CatIndicesResponse>
  async indices (this: That, params?: T.CatIndicesRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.indices']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -779,10 +431,7 @@ export default class Cat {
  async master (this: That, params?: T.CatMasterRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatMasterResponse, unknown>>
  async master (this: That, params?: T.CatMasterRequest, options?: TransportRequestOptions): Promise<T.CatMasterResponse>
  async master (this: That, params?: T.CatMasterRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.master']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -822,10 +471,7 @@ export default class Cat {
  async mlDataFrameAnalytics (this: That, params?: T.CatMlDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatMlDataFrameAnalyticsResponse, unknown>>
  async mlDataFrameAnalytics (this: That, params?: T.CatMlDataFrameAnalyticsRequest, options?: TransportRequestOptions): Promise<T.CatMlDataFrameAnalyticsResponse>
  async mlDataFrameAnalytics (this: That, params?: T.CatMlDataFrameAnalyticsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.ml_data_frame_analytics']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -875,10 +521,7 @@ export default class Cat {
  async mlDatafeeds (this: That, params?: T.CatMlDatafeedsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatMlDatafeedsResponse, unknown>>
  async mlDatafeeds (this: That, params?: T.CatMlDatafeedsRequest, options?: TransportRequestOptions): Promise<T.CatMlDatafeedsResponse>
  async mlDatafeeds (this: That, params?: T.CatMlDatafeedsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.ml_datafeeds']
    const acceptedPath: string[] = ['datafeed_id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -928,10 +571,7 @@ export default class Cat {
  async mlJobs (this: That, params?: T.CatMlJobsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatMlJobsResponse, unknown>>
  async mlJobs (this: That, params?: T.CatMlJobsRequest, options?: TransportRequestOptions): Promise<T.CatMlJobsResponse>
  async mlJobs (this: That, params?: T.CatMlJobsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.ml_jobs']
    const acceptedPath: string[] = ['job_id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -981,10 +621,7 @@ export default class Cat {
  async mlTrainedModels (this: That, params?: T.CatMlTrainedModelsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatMlTrainedModelsResponse, unknown>>
  async mlTrainedModels (this: That, params?: T.CatMlTrainedModelsRequest, options?: TransportRequestOptions): Promise<T.CatMlTrainedModelsResponse>
  async mlTrainedModels (this: That, params?: T.CatMlTrainedModelsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.ml_trained_models']
    const acceptedPath: string[] = ['model_id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1034,10 +671,7 @@ export default class Cat {
  async nodeattrs (this: That, params?: T.CatNodeattrsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatNodeattrsResponse, unknown>>
  async nodeattrs (this: That, params?: T.CatNodeattrsRequest, options?: TransportRequestOptions): Promise<T.CatNodeattrsResponse>
  async nodeattrs (this: That, params?: T.CatNodeattrsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.nodeattrs']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1077,10 +711,7 @@ export default class Cat {
  async nodes (this: That, params?: T.CatNodesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatNodesResponse, unknown>>
  async nodes (this: That, params?: T.CatNodesRequest, options?: TransportRequestOptions): Promise<T.CatNodesResponse>
  async nodes (this: That, params?: T.CatNodesRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.nodes']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1120,10 +751,7 @@ export default class Cat {
  async pendingTasks (this: That, params?: T.CatPendingTasksRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatPendingTasksResponse, unknown>>
  async pendingTasks (this: That, params?: T.CatPendingTasksRequest, options?: TransportRequestOptions): Promise<T.CatPendingTasksResponse>
  async pendingTasks (this: That, params?: T.CatPendingTasksRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.pending_tasks']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1163,10 +791,7 @@ export default class Cat {
  async plugins (this: That, params?: T.CatPluginsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatPluginsResponse, unknown>>
  async plugins (this: That, params?: T.CatPluginsRequest, options?: TransportRequestOptions): Promise<T.CatPluginsResponse>
  async plugins (this: That, params?: T.CatPluginsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.plugins']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1206,10 +831,7 @@ export default class Cat {
  async recovery (this: That, params?: T.CatRecoveryRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatRecoveryResponse, unknown>>
  async recovery (this: That, params?: T.CatRecoveryRequest, options?: TransportRequestOptions): Promise<T.CatRecoveryResponse>
  async recovery (this: That, params?: T.CatRecoveryRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.recovery']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1259,10 +881,7 @@ export default class Cat {
  async repositories (this: That, params?: T.CatRepositoriesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatRepositoriesResponse, unknown>>
  async repositories (this: That, params?: T.CatRepositoriesRequest, options?: TransportRequestOptions): Promise<T.CatRepositoriesResponse>
  async repositories (this: That, params?: T.CatRepositoriesRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.repositories']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1302,10 +921,7 @@ export default class Cat {
  async segments (this: That, params?: T.CatSegmentsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatSegmentsResponse, unknown>>
  async segments (this: That, params?: T.CatSegmentsRequest, options?: TransportRequestOptions): Promise<T.CatSegmentsResponse>
  async segments (this: That, params?: T.CatSegmentsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.segments']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1355,10 +971,7 @@ export default class Cat {
  async shards (this: That, params?: T.CatShardsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatShardsResponse, unknown>>
  async shards (this: That, params?: T.CatShardsRequest, options?: TransportRequestOptions): Promise<T.CatShardsResponse>
  async shards (this: That, params?: T.CatShardsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.shards']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1408,10 +1021,7 @@ export default class Cat {
  async snapshots (this: That, params?: T.CatSnapshotsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatSnapshotsResponse, unknown>>
  async snapshots (this: That, params?: T.CatSnapshotsRequest, options?: TransportRequestOptions): Promise<T.CatSnapshotsResponse>
  async snapshots (this: That, params?: T.CatSnapshotsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.snapshots']
    const acceptedPath: string[] = ['repository']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1461,10 +1071,7 @@ export default class Cat {
  async tasks (this: That, params?: T.CatTasksRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatTasksResponse, unknown>>
  async tasks (this: That, params?: T.CatTasksRequest, options?: TransportRequestOptions): Promise<T.CatTasksResponse>
  async tasks (this: That, params?: T.CatTasksRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.tasks']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1504,10 +1111,7 @@ export default class Cat {
  async templates (this: That, params?: T.CatTemplatesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatTemplatesResponse, unknown>>
  async templates (this: That, params?: T.CatTemplatesRequest, options?: TransportRequestOptions): Promise<T.CatTemplatesResponse>
  async templates (this: That, params?: T.CatTemplatesRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.templates']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1557,10 +1161,7 @@ export default class Cat {
  async threadPool (this: That, params?: T.CatThreadPoolRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatThreadPoolResponse, unknown>>
  async threadPool (this: That, params?: T.CatThreadPoolRequest, options?: TransportRequestOptions): Promise<T.CatThreadPoolResponse>
  async threadPool (this: That, params?: T.CatThreadPoolRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.thread_pool']
    const acceptedPath: string[] = ['thread_pool_patterns']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -1610,10 +1211,7 @@ export default class Cat {
  async transforms (this: That, params?: T.CatTransformsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CatTransformsResponse, unknown>>
  async transforms (this: That, params?: T.CatTransformsRequest, options?: TransportRequestOptions): Promise<T.CatTransformsResponse>
  async transforms (this: That, params?: T.CatTransformsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cat.transforms']
    const acceptedPath: string[] = ['transform_id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -35,185 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Ccr {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'ccr.delete_auto_follow_pattern': { path: ['name'], body: [], query: ['master_timeout'] },
      'ccr.follow': { path: ['index'], body: ['data_stream_name', 'leader_index', 'max_outstanding_read_requests', 'max_outstanding_write_requests', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size', 'read_poll_timeout', 'remote_cluster', 'settings'], query: ['master_timeout', 'wait_for_active_shards'] },
      'ccr.follow_info': { path: ['index'], body: [], query: ['master_timeout'] },
      'ccr.follow_stats': { path: ['index'], body: [], query: ['timeout'] },
      'ccr.forget_follower': { path: ['index'], body: ['follower_cluster', 'follower_index', 'follower_index_uuid', 'leader_remote_cluster'], query: ['timeout'] },
      'ccr.get_auto_follow_pattern': { path: ['name'], body: [], query: ['master_timeout'] },
      'ccr.pause_auto_follow_pattern': { path: ['name'], body: [], query: ['master_timeout'] },
      'ccr.pause_follow': { path: ['index'], body: [], query: ['master_timeout'] },
      'ccr.put_auto_follow_pattern': { path: ['name'], body: ['remote_cluster', 'follow_index_pattern', 'leader_index_patterns', 'leader_index_exclusion_patterns', 'max_outstanding_read_requests', 'settings', 'max_outstanding_write_requests', 'read_poll_timeout', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size'], query: ['master_timeout'] },
      'ccr.resume_auto_follow_pattern': { path: ['name'], body: [], query: ['master_timeout'] },
      'ccr.resume_follow': { path: ['index'], body: ['max_outstanding_read_requests', 'max_outstanding_write_requests', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size', 'read_poll_timeout'], query: ['master_timeout'] },
      'ccr.stats': { path: [], body: [], query: ['master_timeout', 'timeout'] },
      'ccr.unfollow': { path: ['index'], body: [], query: ['master_timeout'] }
    }
  }

  /**
@@ -224,10 +51,7 @@ export default class Ccr {
  async deleteAutoFollowPattern (this: That, params: T.CcrDeleteAutoFollowPatternRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrDeleteAutoFollowPatternResponse, unknown>>
  async deleteAutoFollowPattern (this: That, params: T.CcrDeleteAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<T.CcrDeleteAutoFollowPatternResponse>
  async deleteAutoFollowPattern (this: That, params: T.CcrDeleteAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.delete_auto_follow_pattern']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -269,12 +93,8 @@ export default class Ccr {
  async follow (this: That, params: T.CcrFollowRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrFollowResponse, unknown>>
  async follow (this: That, params: T.CcrFollowRequest, options?: TransportRequestOptions): Promise<T.CcrFollowResponse>
  async follow (this: That, params: T.CcrFollowRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['ccr.follow']
    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['data_stream_name', 'leader_index', 'max_outstanding_read_requests', 'max_outstanding_write_requests', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size', 'read_poll_timeout', 'remote_cluster', 'settings']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -296,14 +116,8 @@ export default class Ccr {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }
@@ -326,10 +140,7 @@ export default class Ccr {
  async followInfo (this: That, params: T.CcrFollowInfoRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrFollowInfoResponse, unknown>>
  async followInfo (this: That, params: T.CcrFollowInfoRequest, options?: TransportRequestOptions): Promise<T.CcrFollowInfoResponse>
  async followInfo (this: That, params: T.CcrFollowInfoRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.follow_info']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -371,10 +182,7 @@ export default class Ccr {
  async followStats (this: That, params: T.CcrFollowStatsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrFollowStatsResponse, unknown>>
  async followStats (this: That, params: T.CcrFollowStatsRequest, options?: TransportRequestOptions): Promise<T.CcrFollowStatsResponse>
  async followStats (this: That, params: T.CcrFollowStatsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.follow_stats']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -416,12 +224,8 @@ export default class Ccr {
  async forgetFollower (this: That, params: T.CcrForgetFollowerRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrForgetFollowerResponse, unknown>>
  async forgetFollower (this: That, params: T.CcrForgetFollowerRequest, options?: TransportRequestOptions): Promise<T.CcrForgetFollowerResponse>
  async forgetFollower (this: That, params: T.CcrForgetFollowerRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['ccr.forget_follower']
    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['follower_cluster', 'follower_index', 'follower_index_uuid', 'leader_remote_cluster']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -443,14 +247,8 @@ export default class Ccr {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }
@@ -473,10 +271,7 @@ export default class Ccr {
  async getAutoFollowPattern (this: That, params?: T.CcrGetAutoFollowPatternRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrGetAutoFollowPatternResponse, unknown>>
  async getAutoFollowPattern (this: That, params?: T.CcrGetAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<T.CcrGetAutoFollowPatternResponse>
  async getAutoFollowPattern (this: That, params?: T.CcrGetAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.get_auto_follow_pattern']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -526,10 +321,7 @@ export default class Ccr {
  async pauseAutoFollowPattern (this: That, params: T.CcrPauseAutoFollowPatternRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrPauseAutoFollowPatternResponse, unknown>>
  async pauseAutoFollowPattern (this: That, params: T.CcrPauseAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<T.CcrPauseAutoFollowPatternResponse>
  async pauseAutoFollowPattern (this: That, params: T.CcrPauseAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.pause_auto_follow_pattern']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -571,10 +363,7 @@ export default class Ccr {
  async pauseFollow (this: That, params: T.CcrPauseFollowRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrPauseFollowResponse, unknown>>
  async pauseFollow (this: That, params: T.CcrPauseFollowRequest, options?: TransportRequestOptions): Promise<T.CcrPauseFollowResponse>
  async pauseFollow (this: That, params: T.CcrPauseFollowRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.pause_follow']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -616,12 +405,8 @@ export default class Ccr {
  async putAutoFollowPattern (this: That, params: T.CcrPutAutoFollowPatternRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrPutAutoFollowPatternResponse, unknown>>
  async putAutoFollowPattern (this: That, params: T.CcrPutAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<T.CcrPutAutoFollowPatternResponse>
  async putAutoFollowPattern (this: That, params: T.CcrPutAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['ccr.put_auto_follow_pattern']
    const acceptedPath: string[] = ['name']
    const acceptedBody: string[] = ['remote_cluster', 'follow_index_pattern', 'leader_index_patterns', 'leader_index_exclusion_patterns', 'max_outstanding_read_requests', 'settings', 'max_outstanding_write_requests', 'read_poll_timeout', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -643,14 +428,8 @@ export default class Ccr {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }
@@ -673,10 +452,7 @@ export default class Ccr {
  async resumeAutoFollowPattern (this: That, params: T.CcrResumeAutoFollowPatternRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrResumeAutoFollowPatternResponse, unknown>>
  async resumeAutoFollowPattern (this: That, params: T.CcrResumeAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<T.CcrResumeAutoFollowPatternResponse>
  async resumeAutoFollowPattern (this: That, params: T.CcrResumeAutoFollowPatternRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.resume_auto_follow_pattern']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -718,12 +494,8 @@ export default class Ccr {
  async resumeFollow (this: That, params: T.CcrResumeFollowRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrResumeFollowResponse, unknown>>
  async resumeFollow (this: That, params: T.CcrResumeFollowRequest, options?: TransportRequestOptions): Promise<T.CcrResumeFollowResponse>
  async resumeFollow (this: That, params: T.CcrResumeFollowRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['ccr.resume_follow']
    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['max_outstanding_read_requests', 'max_outstanding_write_requests', 'max_read_request_operation_count', 'max_read_request_size', 'max_retry_delay', 'max_write_buffer_count', 'max_write_buffer_size', 'max_write_request_operation_count', 'max_write_request_size', 'read_poll_timeout']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -745,14 +517,8 @@ export default class Ccr {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }
@@ -775,10 +541,7 @@ export default class Ccr {
  async stats (this: That, params?: T.CcrStatsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrStatsResponse, unknown>>
  async stats (this: That, params?: T.CcrStatsRequest, options?: TransportRequestOptions): Promise<T.CcrStatsResponse>
  async stats (this: That, params?: T.CcrStatsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.stats']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -818,10 +581,7 @@ export default class Ccr {
  async unfollow (this: That, params: T.CcrUnfollowRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CcrUnfollowResponse, unknown>>
  async unfollow (this: That, params: T.CcrUnfollowRequest, options?: TransportRequestOptions): Promise<T.CcrUnfollowResponse>
  async unfollow (this: That, params: T.CcrUnfollowRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['ccr.unfollow']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -35,22 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  clear_scroll: {
    path: [],
    body: ['scroll_id'],
    query: []
  }
}
interface That { transport: Transport }

/**
 * Clear a scrolling search. Clear the search context and results for a scrolling search.
@@ -60,12 +45,8 @@ export default async function ClearScrollApi (this: That, params?: T.ClearScroll
export default async function ClearScrollApi (this: That, params?: T.ClearScrollRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.ClearScrollResponse, unknown>>
export default async function ClearScrollApi (this: That, params?: T.ClearScrollRequest, options?: TransportRequestOptions): Promise<T.ClearScrollResponse>
export default async function ClearScrollApi (this: That, params?: T.ClearScrollRequest, options?: TransportRequestOptions): Promise<any> {
  const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = acceptedParams.clear_scroll
  const acceptedPath: string[] = []
  const acceptedBody: string[] = ['scroll_id']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -88,14 +69,8 @@ export default async function ClearScrollApi (this: That, params?: T.ClearScroll
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
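The hunk above touches the generated clearScroll helper. For context, typical client-side usage of this API looks like the sketch below; it assumes a reachable node URL and an index named 'my-index', both of which are placeholders rather than anything defined in this diff.

[source,ts]
----
// Minimal sketch: open a scrolling search, then release its search context.
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // placeholder endpoint

async function run () {
  const response = await client.search({
    index: 'my-index',
    scroll: '30s',
    query: { match_all: {} }
  })

  // ...page through hits with client.scroll({ scroll_id, scroll: '30s' })...

  if (response._scroll_id != null) {
    await client.clearScroll({ scroll_id: response._scroll_id })
  }
}

run().catch(console.error)
----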
@@ -35,22 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  close_point_in_time: {
    path: [],
    body: ['id'],
    query: []
  }
}
interface That { transport: Transport }

/**
 * Close a point in time. A point in time must be opened explicitly before being used in search requests. The `keep_alive` parameter tells Elasticsearch how long it should persist. A point in time is automatically closed when the `keep_alive` period has elapsed. However, keeping points in time has a cost; close them as soon as they are no longer required for search requests.
@@ -60,12 +45,8 @@ export default async function ClosePointInTimeApi (this: That, params: T.ClosePo
export default async function ClosePointInTimeApi (this: That, params: T.ClosePointInTimeRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.ClosePointInTimeResponse, unknown>>
export default async function ClosePointInTimeApi (this: That, params: T.ClosePointInTimeRequest, options?: TransportRequestOptions): Promise<T.ClosePointInTimeResponse>
export default async function ClosePointInTimeApi (this: That, params: T.ClosePointInTimeRequest, options?: TransportRequestOptions): Promise<any> {
  const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = acceptedParams.close_point_in_time
  const acceptedPath: string[] = []
  const acceptedBody: string[] = ['id']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -87,14 +68,8 @@ export default async function ClosePointInTimeApi (this: That, params: T.ClosePo
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
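A minimal sketch of opening and closing a point in time with the client (illustrative only; index name and keep-alive are assumptions):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Open a PIT, use it for searches, then close it to release resources.
const pit = await client.openPointInTime({ index: 'my-index', keep_alive: '1m' })
await client.closePointInTime({ id: pit.id })
```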
@@ -35,202 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Cluster {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'cluster.allocation_explain': { path: [], body: ['current_node', 'index', 'primary', 'shard'], query: ['include_disk_info', 'include_yes_decisions', 'master_timeout'] },
      'cluster.delete_component_template': { path: ['name'], body: [], query: ['master_timeout', 'timeout'] },
      'cluster.delete_voting_config_exclusions': { path: [], body: [], query: ['master_timeout', 'wait_for_removal'] },
      'cluster.exists_component_template': { path: ['name'], body: [], query: ['master_timeout', 'local'] },
      'cluster.get_component_template': { path: ['name'], body: [], query: ['flat_settings', 'include_defaults', 'local', 'master_timeout'] },
      'cluster.get_settings': { path: [], body: [], query: ['flat_settings', 'include_defaults', 'master_timeout', 'timeout'] },
      'cluster.health': { path: ['index'], body: [], query: ['expand_wildcards', 'level', 'local', 'master_timeout', 'timeout', 'wait_for_active_shards', 'wait_for_events', 'wait_for_nodes', 'wait_for_no_initializing_shards', 'wait_for_no_relocating_shards', 'wait_for_status'] },
      'cluster.info': { path: ['target'], body: [], query: [] },
      'cluster.pending_tasks': { path: [], body: [], query: ['local', 'master_timeout'] },
      'cluster.post_voting_config_exclusions': { path: [], body: [], query: ['node_names', 'node_ids', 'master_timeout', 'timeout'] },
      'cluster.put_component_template': { path: ['name'], body: ['template', 'version', '_meta', 'deprecated'], query: ['create', 'master_timeout'] },
      'cluster.put_settings': { path: [], body: ['persistent', 'transient'], query: ['flat_settings', 'master_timeout', 'timeout'] },
      'cluster.remote_info': { path: [], body: [], query: [] },
      'cluster.reroute': { path: [], body: ['commands'], query: ['dry_run', 'explain', 'metric', 'retry_failed', 'master_timeout', 'timeout'] },
      'cluster.state': { path: ['metric', 'index'], body: [], query: ['allow_no_indices', 'expand_wildcards', 'flat_settings', 'ignore_unavailable', 'local', 'master_timeout', 'wait_for_metadata_version', 'wait_for_timeout'] },
      'cluster.stats': { path: ['node_id'], body: [], query: ['include_remotes', 'timeout'] }
    }
  }

/**
@@ -241,12 +51,8 @@ export default class Cluster {
  async allocationExplain (this: That, params?: T.ClusterAllocationExplainRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['cluster.allocation_explain']
    const acceptedPath: string[] = []
    const acceptedBody: string[] = ['current_node', 'index', 'primary', 'shard']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -269,14 +75,8 @@ export default class Cluster {
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
@@ -296,10 +96,7 @@ export default class Cluster {
  async deleteComponentTemplate (this: That, params: T.ClusterDeleteComponentTemplateRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.delete_component_template']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -341,10 +138,7 @@ export default class Cluster {
  async deleteVotingConfigExclusions (this: That, params?: T.ClusterDeleteVotingConfigExclusionsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.delete_voting_config_exclusions']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -384,10 +178,7 @@ export default class Cluster {
  async existsComponentTemplate (this: That, params: T.ClusterExistsComponentTemplateRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.exists_component_template']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -429,10 +220,7 @@ export default class Cluster {
  async getComponentTemplate (this: That, params?: T.ClusterGetComponentTemplateRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.get_component_template']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -482,10 +270,7 @@ export default class Cluster {
  async getSettings (this: That, params?: T.ClusterGetSettingsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.get_settings']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -525,10 +310,7 @@ export default class Cluster {
  async health (this: That, params?: T.ClusterHealthRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.health']
    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -578,10 +360,7 @@ export default class Cluster {
  async info (this: That, params: T.ClusterInfoRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.info']
    const acceptedPath: string[] = ['target']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -623,10 +402,7 @@ export default class Cluster {
  async pendingTasks (this: That, params?: T.ClusterPendingTasksRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.pending_tasks']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -666,10 +442,7 @@ export default class Cluster {
  async postVotingConfigExclusions (this: That, params?: T.ClusterPostVotingConfigExclusionsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.post_voting_config_exclusions']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -709,12 +482,8 @@ export default class Cluster {
  async putComponentTemplate (this: That, params: T.ClusterPutComponentTemplateRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['cluster.put_component_template']
    const acceptedPath: string[] = ['name']
    const acceptedBody: string[] = ['template', 'version', '_meta', 'deprecated']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -736,14 +505,8 @@ export default class Cluster {
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
@@ -766,12 +529,8 @@ export default class Cluster {
  async putSettings (this: That, params?: T.ClusterPutSettingsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['cluster.put_settings']
    const acceptedPath: string[] = []
    const acceptedBody: string[] = ['persistent', 'transient']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -794,14 +553,8 @@ export default class Cluster {
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
@@ -821,10 +574,7 @@ export default class Cluster {
  async remoteInfo (this: That, params?: T.ClusterRemoteInfoRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.remote_info']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -864,12 +614,8 @@ export default class Cluster {
  async reroute (this: That, params?: T.ClusterRerouteRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['cluster.reroute']
    const acceptedPath: string[] = []
    const acceptedBody: string[] = ['commands']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -892,14 +638,8 @@ export default class Cluster {
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
@@ -919,10 +659,7 @@ export default class Cluster {
  async state (this: That, params?: T.ClusterStateRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.state']
    const acceptedPath: string[] = ['metric', 'index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -976,10 +713,7 @@ export default class Cluster {
  async stats (this: That, params?: T.ClusterStatsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['cluster.stats']
    const acceptedPath: string[] = ['node_id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
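A hedged sketch of one Cluster helper touched above, `cluster.health`, using query parameters from its accepted list (the node URL and status threshold are assumptions):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Wait up to 30s for the cluster to reach at least yellow status.
const health = await client.cluster.health({ wait_for_status: 'yellow', timeout: '30s' })
console.log(health.status)
```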
File diff suppressed because it is too large
@@ -35,39 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  count: {
    path: ['index'],
    body: ['query'],
    query: ['allow_no_indices', 'analyzer', 'analyze_wildcard', 'default_operator', 'df', 'expand_wildcards', 'ignore_throttled', 'ignore_unavailable', 'lenient', 'min_score', 'preference', 'routing', 'terminate_after', 'q']
  }
}
interface That { transport: Transport }

/**
 * Count search results. Get the number of documents matching a query. The query can be provided either by using a simple query string as a parameter, or by defining Query DSL within the request body. The query is optional. When no query is provided, the API uses `match_all` to count all the documents. The count API supports multi-target syntax. You can run a single count API search across multiple data streams and indices. The operation is broadcast across all shards. For each shard ID group, a replica is chosen and the search is run against it. This means that replicas increase the scalability of the count.
@@ -77,12 +45,8 @@ export default async function CountApi (this: That, params?: T.CountRequest, opt
export default async function CountApi (this: That, params?: T.CountRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.CountResponse, unknown>>
export default async function CountApi (this: That, params?: T.CountRequest, options?: TransportRequestOptions): Promise<T.CountResponse>
export default async function CountApi (this: That, params?: T.CountRequest, options?: TransportRequestOptions): Promise<any> {
  const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = acceptedParams.count
  const acceptedPath: string[] = ['index']
  const acceptedBody: string[] = ['query']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -105,14 +69,8 @@ export default async function CountApi (this: That, params?: T.CountRequest, opt
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
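A minimal count sketch (index name and query are illustrative placeholders):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Count documents in an index that match a Query DSL query.
const { count } = await client.count({
  index: 'my-index',
  query: { match: { 'user.id': 'kimchy' } }
})
console.log(count)
```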
File diff suppressed because one or more lines are too long
@@ -35,46 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}
interface That { transport: Transport }

export default class DanglingIndices {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'dangling_indices.delete_dangling_index': { path: ['index_uuid'], body: [], query: ['accept_data_loss', 'master_timeout', 'timeout'] },
      'dangling_indices.import_dangling_index': { path: ['index_uuid'], body: [], query: ['accept_data_loss', 'master_timeout', 'timeout'] },
      'dangling_indices.list_dangling_indices': { path: [], body: [], query: [] }
    }
  }

/**
@@ -85,10 +51,7 @@ export default class DanglingIndices {
  async deleteDanglingIndex (this: That, params: T.DanglingIndicesDeleteDanglingIndexRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['dangling_indices.delete_dangling_index']
    const acceptedPath: string[] = ['index_uuid']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -130,10 +93,7 @@ export default class DanglingIndices {
  async importDanglingIndex (this: That, params: T.DanglingIndicesImportDanglingIndexRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['dangling_indices.import_dangling_index']
    const acceptedPath: string[] = ['index_uuid']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -175,10 +135,7 @@ export default class DanglingIndices {
  async listDanglingIndices (this: That, params?: T.DanglingIndicesListDanglingIndicesRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['dangling_indices.list_dangling_indices']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
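A hedged sketch of the dangling-indices helpers above (the response field name and UUID are assumptions; accept_data_loss must be set explicitly):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// List dangling indices, then import one by its UUID.
const listed = await client.danglingIndices.listDanglingIndices()
console.log(listed.dangling_indices)
await client.danglingIndices.importDanglingIndex({ index_uuid: '<index-uuid>', accept_data_loss: true })
```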
@@ -35,30 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  delete: {
    path: ['id', 'index'],
    body: [],
    query: ['if_primary_term', 'if_seq_no', 'refresh', 'routing', 'timeout', 'version', 'version_type', 'wait_for_active_shards']
  }
}
interface That { transport: Transport }

/**
 * Delete a document. Remove a JSON document from the specified index. NOTE: You cannot send deletion requests directly to a data stream. To delete a document in a data stream, you must target the backing index containing the document. **Optimistic concurrency control** Delete operations can be made conditional and only be performed if the last modification to the document was assigned the sequence number and primary term specified by the `if_seq_no` and `if_primary_term` parameters. If a mismatch is detected, the operation will result in a `VersionConflictException` and a status code of `409`. **Versioning** Each document indexed is versioned. When deleting a document, the version can be specified to make sure the relevant document you are trying to delete is actually being deleted and it has not changed in the meantime. Every write operation run on a document, deletes included, causes its version to be incremented. The version number of a deleted document remains available for a short time after deletion to allow for control of concurrent operations. The length of time for which a deleted document's version remains available is determined by the `index.gc_deletes` index setting. **Routing** If routing is used during indexing, the routing value also needs to be specified to delete a document. If the `_routing` mapping is set to `required` and no routing value is specified, the delete API throws a `RoutingMissingException` and rejects the request. For example: ``` DELETE /my-index-000001/_doc/1?routing=shard-1 ``` This request deletes the document with ID 1, but it is routed based on the user. The document is not deleted if the correct routing is not specified. **Distributed** The delete operation gets hashed into a specific shard ID. It then gets redirected into the primary shard within that ID group and replicated (if needed) to shard replicas within that ID group.
@@ -68,10 +45,7 @@ export default async function DeleteApi (this: That, params: T.DeleteRequest, op
export default async function DeleteApi (this: That, params: T.DeleteRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.DeleteResponse, unknown>>
export default async function DeleteApi (this: That, params: T.DeleteRequest, options?: TransportRequestOptions): Promise<T.DeleteResponse>
export default async function DeleteApi (this: That, params: T.DeleteRequest, options?: TransportRequestOptions): Promise<any> {
  const { path: acceptedPath } = acceptedParams.delete
  const acceptedPath: string[] = ['id', 'index']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
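A minimal sketch mirroring the routing example from the comment above (index, id, and routing value are illustrative):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Delete document 1, passing the routing value that was used at index time.
await client.delete({ index: 'my-index-000001', id: '1', routing: 'shard-1' })
```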
File diff suppressed because one or more lines are too long
@@ -35,22 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  delete_by_query_rethrottle: {
    path: ['task_id'],
    body: [],
    query: ['requests_per_second']
  }
}
interface That { transport: Transport }

/**
 * Throttle a delete by query operation. Change the number of requests per second for a particular delete by query operation. Rethrottling that speeds up the query takes effect immediately but rethrottling that slows down the query takes effect after completing the current batch to prevent scroll timeouts.
@@ -60,10 +45,7 @@ export default async function DeleteByQueryRethrottleApi (this: That, params: T.
export default async function DeleteByQueryRethrottleApi (this: That, params: T.DeleteByQueryRethrottleRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.DeleteByQueryRethrottleResponse, unknown>>
export default async function DeleteByQueryRethrottleApi (this: That, params: T.DeleteByQueryRethrottleRequest, options?: TransportRequestOptions): Promise<T.DeleteByQueryRethrottleResponse>
export default async function DeleteByQueryRethrottleApi (this: That, params: T.DeleteByQueryRethrottleRequest, options?: TransportRequestOptions): Promise<any> {
  const { path: acceptedPath } = acceptedParams.delete_by_query_rethrottle
  const acceptedPath: string[] = ['task_id']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
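A hedged rethrottle sketch (the task id is a placeholder returned by an asynchronous delete-by-query call):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Slow a running delete-by-query task down to 1 request per second.
await client.deleteByQueryRethrottle({ task_id: '<node-id>:<task-number>', requests_per_second: 1 })
```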
@@ -35,23 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  delete_script: {
    path: ['id'],
    body: [],
    query: ['master_timeout', 'timeout']
  }
}
interface That { transport: Transport }

/**
 * Delete a script or search template. Deletes a stored script or search template.
@@ -61,10 +45,7 @@ export default async function DeleteScriptApi (this: That, params: T.DeleteScrip
export default async function DeleteScriptApi (this: That, params: T.DeleteScriptRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.DeleteScriptResponse, unknown>>
export default async function DeleteScriptApi (this: That, params: T.DeleteScriptRequest, options?: TransportRequestOptions): Promise<T.DeleteScriptResponse>
export default async function DeleteScriptApi (this: That, params: T.DeleteScriptRequest, options?: TransportRequestOptions): Promise<any> {
  const { path: acceptedPath } = acceptedParams.delete_script
  const acceptedPath: string[] = ['id']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
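A minimal sketch of deleting a stored script or search template ('my-search-template' is an assumed id):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Remove a stored script or search template by id.
await client.deleteScript({ id: 'my-search-template' })
```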
@@ -35,69 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Enrich {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'enrich.delete_policy': { path: ['name'], body: [], query: ['master_timeout'] },
      'enrich.execute_policy': { path: ['name'], body: [], query: ['master_timeout', 'wait_for_completion'] },
      'enrich.get_policy': { path: ['name'], body: [], query: ['master_timeout'] },
      'enrich.put_policy': { path: ['name'], body: ['geo_match', 'match', 'range'], query: ['master_timeout'] },
      'enrich.stats': { path: [], body: [], query: ['master_timeout'] }
    }
  }

/**
@@ -108,10 +51,7 @@ export default class Enrich {
  async deletePolicy (this: That, params: T.EnrichDeletePolicyRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['enrich.delete_policy']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -153,10 +93,7 @@ export default class Enrich {
  async executePolicy (this: That, params: T.EnrichExecutePolicyRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['enrich.execute_policy']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -198,10 +135,7 @@ export default class Enrich {
  async getPolicy (this: That, params?: T.EnrichGetPolicyRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['enrich.get_policy']
    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -251,12 +185,8 @@ export default class Enrich {
  async putPolicy (this: That, params: T.EnrichPutPolicyRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['enrich.put_policy']
    const acceptedPath: string[] = ['name']
    const acceptedBody: string[] = ['geo_match', 'match', 'range']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -278,14 +208,8 @@ export default class Enrich {
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
@@ -308,10 +232,7 @@ export default class Enrich {
  async stats (this: That, params?: T.EnrichStatsRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['enrich.stats']
    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
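A hedged sketch of the enrich helpers above, using the `match` body field from the accepted list (policy, index, and field names are assumptions):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Create a match enrich policy, then execute it to build the enrich index.
await client.enrich.putPolicy({
  name: 'users-policy',
  match: { indices: 'users', match_field: 'email', enrich_fields: ['first_name', 'last_name'] }
})
await client.enrich.executePolicy({ name: 'users-policy' })
```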
@@ -35,79 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Eql {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'eql.delete': { path: ['id'], body: [], query: [] },
      'eql.get': { path: ['id'], body: [], query: ['keep_alive', 'wait_for_completion_timeout'] },
      'eql.get_status': { path: ['id'], body: [], query: [] },
      'eql.search': { path: ['index'], body: ['query', 'case_sensitive', 'event_category_field', 'tiebreaker_field', 'timestamp_field', 'fetch_size', 'filter', 'keep_alive', 'keep_on_completion', 'wait_for_completion_timeout', 'allow_partial_search_results', 'allow_partial_sequence_results', 'size', 'fields', 'result_position', 'runtime_mappings', 'max_samples_per_key'], query: ['allow_no_indices', 'allow_partial_search_results', 'allow_partial_sequence_results', 'expand_wildcards', 'ignore_unavailable', 'keep_alive', 'keep_on_completion', 'wait_for_completion_timeout'] }
    }
  }

/**
@@ -118,10 +51,7 @@ export default class Eql {
  async delete (this: That, params: T.EqlDeleteRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['eql.delete']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -163,10 +93,7 @@ export default class Eql {
  async get<TEvent = unknown> (this: That, params: T.EqlGetRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['eql.get']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -208,10 +135,7 @@ export default class Eql {
  async getStatus (this: That, params: T.EqlGetStatusRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['eql.get_status']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -253,12 +177,8 @@ export default class Eql {
  async search<TEvent = unknown> (this: That, params: T.EqlSearchRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['eql.search']
    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['query', 'case_sensitive', 'event_category_field', 'tiebreaker_field', 'timestamp_field', 'fetch_size', 'filter', 'keep_alive', 'keep_on_completion', 'wait_for_completion_timeout', 'allow_partial_search_results', 'allow_partial_sequence_results', 'size', 'fields', 'result_position', 'runtime_mappings', 'max_samples_per_key']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -280,14 +200,8 @@ export default class Eql {
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
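A hedged sketch of `eql.search`, the helper with the largest accepted body above (index pattern and EQL query are illustrative):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Run an EQL query against an index pattern and cap the hit count.
const result = await client.eql.search({
  index: 'my-data-stream',
  query: 'process where process.name == "regsvr32.exe"',
  size: 10
})
console.log(result.hits)
```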
@@ -35,87 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Esql {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'esql.async_query': { path: [], body: ['columnar', 'filter', 'locale', 'params', 'profile', 'query', 'tables', 'include_ccs_metadata'], query: ['delimiter', 'drop_null_columns', 'format', 'keep_alive', 'keep_on_completion', 'wait_for_completion_timeout'] },
      'esql.async_query_delete': { path: ['id'], body: [], query: [] },
      'esql.async_query_get': { path: ['id'], body: [], query: ['drop_null_columns', 'keep_alive', 'wait_for_completion_timeout'] },
      'esql.async_query_stop': { path: ['id'], body: [], query: ['drop_null_columns'] },
      'esql.query': { path: [], body: ['columnar', 'filter', 'locale', 'params', 'profile', 'query', 'tables', 'include_ccs_metadata'], query: ['format', 'delimiter', 'drop_null_columns'] }
    }
  }

/**
@@ -126,12 +51,8 @@ export default class Esql {
  async asyncQuery (this: That, params: T.EsqlAsyncQueryRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['esql.async_query']
    const acceptedPath: string[] = []
    const acceptedBody: string[] = ['columnar', 'filter', 'locale', 'params', 'profile', 'query', 'tables', 'include_ccs_metadata']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -153,14 +74,8 @@ export default class Esql {
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
@@ -180,10 +95,7 @@ export default class Esql {
  async asyncQueryDelete (this: That, params: T.EsqlAsyncQueryDeleteRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['esql.async_query_delete']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -225,10 +137,7 @@ export default class Esql {
  async asyncQueryGet (this: That, params: T.EsqlAsyncQueryGetRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['esql.async_query_get']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -270,10 +179,7 @@ export default class Esql {
  async asyncQueryStop (this: That, params: T.EsqlAsyncQueryStopRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath } = this.acceptedParams['esql.async_query_stop']
    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -315,12 +221,8 @@ export default class Esql {
  async query (this: That, params: T.EsqlQueryRequest, options?: TransportRequestOptions): Promise<any> {
    const { path: acceptedPath, body: acceptedBody, query: acceptedQuery } = this.acceptedParams['esql.query']
    const acceptedPath: string[] = []
    const acceptedBody: string[] = ['columnar', 'filter', 'locale', 'params', 'profile', 'query', 'tables', 'include_ccs_metadata']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
@@ -342,14 +244,8 @@ export default class Esql {
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }
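A hedged sketch of `esql.query`, whose `query` body field is listed above (the index name and ES|QL statement are assumptions):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Run an ES|QL query; the response carries columns and values arrays.
const result = await client.esql.query({
  query: 'FROM my-index | WHERE user.id == "kimchy" | LIMIT 10'
})
console.log(result.columns, result.values)
```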
@@ -35,32 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  exists: {
    path: ['id', 'index'],
    body: [],
    query: ['preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_includes', 'stored_fields', 'version', 'version_type']
  }
}
interface That { transport: Transport }

/**
 * Check a document. Verify that a document exists. For example, check to see if a document with the `_id` 0 exists: ``` HEAD my-index-000001/_doc/0 ``` If the document exists, the API returns a status code of `200 - OK`. If the document doesn’t exist, the API returns `404 - Not Found`. **Versioning support** You can use the `version` parameter to check the document only if its current version is equal to the specified one. Internally, Elasticsearch has marked the old document as deleted and added an entirely new document. The old version of the document doesn't disappear immediately, although you won't be able to access it. Elasticsearch cleans up deleted documents in the background as you continue to index more data.
@@ -70,10 +45,7 @@ export default async function ExistsApi (this: That, params: T.ExistsRequest, op
export default async function ExistsApi (this: That, params: T.ExistsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.ExistsResponse, unknown>>
export default async function ExistsApi (this: That, params: T.ExistsRequest, options?: TransportRequestOptions): Promise<T.ExistsResponse>
export default async function ExistsApi (this: That, params: T.ExistsRequest, options?: TransportRequestOptions): Promise<any> {
  const { path: acceptedPath } = acceptedParams.exists
  const acceptedPath: string[] = ['id', 'index']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
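A minimal sketch of the client-side equivalent of `HEAD my-index-000001/_doc/0` from the comment above:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Resolves to true if the document exists, false otherwise.
const found = await client.exists({ index: 'my-index-000001', id: '0' })
console.log(found)
```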
@ -35,31 +35,7 @@ import {
|
||||
TransportResult
|
||||
} from '@elastic/transport'
|
||||
import * as T from '../types'
|
||||
|
||||
interface That {
|
||||
transport: Transport
|
||||
}
|
||||
|
||||
const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
|
||||
exists_source: {
|
||||
path: [
|
||||
'id',
|
||||
'index'
|
||||
],
|
||||
body: [],
|
||||
query: [
|
||||
'preference',
|
||||
'realtime',
|
||||
'refresh',
|
||||
'routing',
|
||||
'_source',
|
||||
'_source_excludes',
|
||||
'_source_includes',
|
||||
'version',
|
||||
'version_type'
|
||||
]
|
||||
}
|
||||
}
|
||||
interface That { transport: Transport }
|
||||
|
||||
/**
|
||||
* Check for a document source. Check whether a document source exists in an index. For example: ``` HEAD my-index-000001/_source/1 ``` A document's source is not available if it is disabled in the mapping.
@@ -69,10 +45,7 @@ export default async function ExistsSourceApi (this: That, params: T.ExistsSourc
export default async function ExistsSourceApi (this: That, params: T.ExistsSourceRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.ExistsSourceResponse, unknown>>
export default async function ExistsSourceApi (this: That, params: T.ExistsSourceRequest, options?: TransportRequestOptions): Promise<T.ExistsSourceResponse>
export default async function ExistsSourceApi (this: That, params: T.ExistsSourceRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath
  } = acceptedParams.exists_source

  const acceptedPath: string[] = ['id', 'index']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

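Similarly, a hedged sketch of calling the `existsSource` helper (placeholder index and ID):

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Equivalent to HEAD my-index-000001/_source/1: false when the document is missing or _source is disabled in the mapping
const hasSource: boolean = await client.existsSource({ index: 'my-index-000001', id: '1' })
console.log(hasSource)
```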
@@ -35,38 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  explain: {
    path: ['id', 'index'],
    body: ['query'],
    query: ['analyzer', 'analyze_wildcard', 'default_operator', 'df', 'lenient', 'preference', 'routing', '_source', '_source_excludes', '_source_includes', 'stored_fields', 'q']
  }
}
interface That { transport: Transport }

/**
* Explain a document match result. Get information about why a specific document matches, or doesn't match, a query. It computes a score explanation for a query and a specific document.
@@ -76,12 +45,8 @@ export default async function ExplainApi<TDocument = unknown> (this: That, param
export default async function ExplainApi<TDocument = unknown> (this: That, params: T.ExplainRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.ExplainResponse<TDocument>, unknown>>
export default async function ExplainApi<TDocument = unknown> (this: That, params: T.ExplainRequest, options?: TransportRequestOptions): Promise<T.ExplainResponse<TDocument>>
export default async function ExplainApi<TDocument = unknown> (this: That, params: T.ExplainRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath,
    body: acceptedBody,
    query: acceptedQuery
  } = acceptedParams.explain

  const acceptedPath: string[] = ['id', 'index']
  const acceptedBody: string[] = ['query']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -103,14 +68,8 @@ export default async function ExplainApi<TDocument = unknown> (this: That, param
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }

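A minimal sketch of the `explain` helper, which returns a score explanation for one document against one query; the index, ID, and field names are placeholders:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

const response = await client.explain({
  index: 'my-index-000001',
  id: '0',
  query: { match: { message: 'elasticsearch' } }
})
// `matched` says whether the document matches; `explanation` breaks down the score
console.log(response.matched, response.explanation)
```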
@@ -35,33 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}
interface That { transport: Transport }

export default class Features {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'features.get_features': { path: [], body: [], query: ['master_timeout'] },
      'features.reset_features': { path: [], body: [], query: ['master_timeout'] }
    }
  }

/**
@@ -72,10 +51,7 @@ export default class Features {
  async getFeatures (this: That, params?: T.FeaturesGetFeaturesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FeaturesGetFeaturesResponse, unknown>>
  async getFeatures (this: That, params?: T.FeaturesGetFeaturesRequest, options?: TransportRequestOptions): Promise<T.FeaturesGetFeaturesResponse>
  async getFeatures (this: That, params?: T.FeaturesGetFeaturesRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['features.get_features']

    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -115,10 +91,7 @@ export default class Features {
  async resetFeatures (this: That, params?: T.FeaturesResetFeaturesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FeaturesResetFeaturesResponse, unknown>>
  async resetFeatures (this: That, params?: T.FeaturesResetFeaturesRequest, options?: TransportRequestOptions): Promise<T.FeaturesResetFeaturesResponse>
  async resetFeatures (this: That, params?: T.FeaturesResetFeaturesRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['features.reset_features']

    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -35,35 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  field_caps: {
    path: ['index'],
    body: ['fields', 'index_filter', 'runtime_mappings'],
    query: ['allow_no_indices', 'expand_wildcards', 'fields', 'ignore_unavailable', 'include_unmapped', 'filters', 'types', 'include_empty_fields']
  }
}
interface That { transport: Transport }

/**
* Get the field capabilities. Get information about the capabilities of fields among multiple indices. For data streams, the API returns field capabilities among the stream’s backing indices. It returns runtime fields like any other field. For example, a runtime field with a type of keyword is returned the same as any other field that belongs to the `keyword` family.
@@ -73,12 +45,8 @@ export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequ
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FieldCapsResponse, unknown>>
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest, options?: TransportRequestOptions): Promise<T.FieldCapsResponse>
export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath,
    body: acceptedBody,
    query: acceptedQuery
  } = acceptedParams.field_caps

  const acceptedPath: string[] = ['index']
  const acceptedBody: string[] = ['fields', 'index_filter', 'runtime_mappings']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -101,14 +69,8 @@ export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequ
    } else if (acceptedPath.includes(key)) {
      continue
    } else if (key !== 'body' && key !== 'querystring') {
      if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
        // @ts-expect-error
        querystring[key] = params[key]
      } else {
        body = body ?? {}
        // @ts-expect-error
        body[key] = params[key]
      }
      // @ts-expect-error
      querystring[key] = params[key]
    }
  }

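A short sketch of the `fieldCaps` helper; the index pattern and field list are placeholders:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Returns the capabilities of the requested fields across all matching indices
const caps = await client.fieldCaps({ index: 'my-index-*', fields: ['message', '*.id'] })
console.log(caps.fields)
```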
@@ -35,159 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Fleet {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'fleet.delete_secret': { path: ['id'], body: [], query: [] },
      'fleet.get_secret': { path: ['id'], body: [], query: [] },
      'fleet.global_checkpoints': { path: ['index'], body: [], query: ['wait_for_advance', 'wait_for_index', 'checkpoints', 'timeout'] },
      'fleet.msearch': { path: ['index'], body: ['searches'], query: ['allow_no_indices', 'ccs_minimize_roundtrips', 'expand_wildcards', 'ignore_throttled', 'ignore_unavailable', 'max_concurrent_searches', 'max_concurrent_shard_requests', 'pre_filter_shard_size', 'search_type', 'rest_total_hits_as_int', 'typed_keys', 'wait_for_checkpoints', 'allow_partial_search_results'] },
      'fleet.post_secret': { path: [], body: [], query: [] },
      'fleet.search': { path: ['index'], body: ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats'], query: ['allow_no_indices', 'analyzer', 'analyze_wildcard', 'batched_reduce_size', 'ccs_minimize_roundtrips', 'default_operator', 'df', 'docvalue_fields', 'expand_wildcards', 'explain', 'ignore_throttled', 'ignore_unavailable', 'lenient', 'max_concurrent_shard_requests', 'preference', 'pre_filter_shard_size', 'request_cache', 'routing', 'scroll', 'search_type', 'stats', 'stored_fields', 'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text', 'terminate_after', 'timeout', 'track_total_hits', 'track_scores', 'typed_keys', 'rest_total_hits_as_int', 'version', '_source', '_source_excludes', '_source_includes', 'seq_no_primary_term', 'q', 'size', 'from', 'sort', 'wait_for_checkpoints', 'allow_partial_search_results'] }
    }
  }

/**
@@ -197,10 +50,7 @@ export default class Fleet {
  async deleteSecret (this: That, params?: T.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
  async deleteSecret (this: That, params?: T.TODO, options?: TransportRequestOptions): Promise<T.TODO>
  async deleteSecret (this: That, params?: T.TODO, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['fleet.delete_secret']

    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -241,10 +91,7 @@ export default class Fleet {
  async getSecret (this: That, params?: T.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
  async getSecret (this: That, params?: T.TODO, options?: TransportRequestOptions): Promise<T.TODO>
  async getSecret (this: That, params?: T.TODO, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['fleet.get_secret']

    const acceptedPath: string[] = ['id']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -286,10 +133,7 @@ export default class Fleet {
  async globalCheckpoints (this: That, params: T.FleetGlobalCheckpointsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FleetGlobalCheckpointsResponse, unknown>>
  async globalCheckpoints (this: That, params: T.FleetGlobalCheckpointsRequest, options?: TransportRequestOptions): Promise<T.FleetGlobalCheckpointsResponse>
  async globalCheckpoints (this: That, params: T.FleetGlobalCheckpointsRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['fleet.global_checkpoints']

    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -331,12 +175,8 @@ export default class Fleet {
  async msearch<TDocument = unknown> (this: That, params: T.FleetMsearchRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FleetMsearchResponse<TDocument>, unknown>>
  async msearch<TDocument = unknown> (this: That, params: T.FleetMsearchRequest, options?: TransportRequestOptions): Promise<T.FleetMsearchResponse<TDocument>>
  async msearch<TDocument = unknown> (this: That, params: T.FleetMsearchRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath,
      body: acceptedBody,
      query: acceptedQuery
    } = this.acceptedParams['fleet.msearch']

    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['searches']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -348,14 +188,8 @@ export default class Fleet {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }

@@ -384,10 +218,7 @@ export default class Fleet {
  async postSecret (this: That, params?: T.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>>
  async postSecret (this: That, params?: T.TODO, options?: TransportRequestOptions): Promise<T.TODO>
  async postSecret (this: That, params?: T.TODO, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['fleet.post_secret']

    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -426,12 +257,8 @@ export default class Fleet {
  async search<TDocument = unknown> (this: That, params: T.FleetSearchRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.FleetSearchResponse<TDocument>, unknown>>
  async search<TDocument = unknown> (this: That, params: T.FleetSearchRequest, options?: TransportRequestOptions): Promise<T.FleetSearchResponse<TDocument>>
  async search<TDocument = unknown> (this: That, params: T.FleetSearchRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath,
      body: acceptedBody,
      query: acceptedQuery
    } = this.acceptedParams['fleet.search']

    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -453,14 +280,8 @@ export default class Fleet {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }

@@ -35,33 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  get: {
    path: ['id', 'index'],
    body: [],
    query: ['force_synthetic_source', 'preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_includes', 'stored_fields', 'version', 'version_type']
  }
}
interface That { transport: Transport }

/**
* Get a document by its ID. Get a document and its source or stored fields from an index. By default, this API is realtime and is not affected by the refresh rate of the index (when data will become visible for search). In the case where stored fields are requested with the `stored_fields` parameter and the document has been updated but is not yet refreshed, the API will have to parse and analyze the source to extract the stored fields. To turn off realtime behavior, set the `realtime` parameter to false. **Source filtering** By default, the API returns the contents of the `_source` field unless you have used the `stored_fields` parameter or the `_source` field is turned off. You can turn off `_source` retrieval by using the `_source` parameter: ``` GET my-index-000001/_doc/0?_source=false ``` If you only need one or two fields from the `_source`, use the `_source_includes` or `_source_excludes` parameters to include or filter out particular fields. This can be helpful with large documents where partial retrieval can save on network overhead Both parameters take a comma separated list of fields or wildcard expressions. For example: ``` GET my-index-000001/_doc/0?_source_includes=*.id&_source_excludes=entities ``` If you only want to specify includes, you can use a shorter notation: ``` GET my-index-000001/_doc/0?_source=*.id ``` **Routing** If routing is used during indexing, the routing value also needs to be specified to retrieve a document. For example: ``` GET my-index-000001/_doc/2?routing=user1 ``` This request gets the document with ID 2, but it is routed based on the user. The document is not fetched if the correct routing is not specified. **Distributed** The GET operation is hashed into a specific shard ID. It is then redirected to one of the replicas within that shard ID and returns the result. The replicas are the primary shard and its replicas within that shard ID group. This means that the more replicas you have, the better your GET scaling will be. **Versioning support** You can use the `version` parameter to retrieve the document only if its current version is equal to the specified one. Internally, Elasticsearch has marked the old document as deleted and added an entirely new document. The old version of the document doesn't disappear immediately, although you won't be able to access it. Elasticsearch cleans up deleted documents in the background as you continue to index more data.
@@ -71,10 +45,7 @@ export default async function GetApi<TDocument = unknown> (this: That, params: T
export default async function GetApi<TDocument = unknown> (this: That, params: T.GetRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.GetResponse<TDocument>, unknown>>
export default async function GetApi<TDocument = unknown> (this: That, params: T.GetRequest, options?: TransportRequestOptions): Promise<T.GetResponse<TDocument>>
export default async function GetApi<TDocument = unknown> (this: That, params: T.GetRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath
  } = acceptedParams.get

  const acceptedPath: string[] = ['id', 'index']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

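A minimal sketch of fetching a document with the `get` helper; the document type, index, and ID are placeholder assumptions:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

interface MyDocument { message: string }

// Equivalent to GET my-index-000001/_doc/0; _source is returned unless disabled or filtered out
const doc = await client.get<MyDocument>({ index: 'my-index-000001', id: '0' })
console.log(doc._source?.message)
```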
@@ -35,22 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  get_script: {
    path: ['id'],
    body: [],
    query: ['master_timeout']
  }
}
interface That { transport: Transport }

/**
* Get a script or search template. Retrieves a stored script or search template.
@@ -60,10 +45,7 @@ export default async function GetScriptApi (this: That, params: T.GetScriptReque
export default async function GetScriptApi (this: That, params: T.GetScriptRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.GetScriptResponse, unknown>>
export default async function GetScriptApi (this: That, params: T.GetScriptRequest, options?: TransportRequestOptions): Promise<T.GetScriptResponse>
export default async function GetScriptApi (this: That, params: T.GetScriptRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath
  } = acceptedParams.get_script

  const acceptedPath: string[] = ['id']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

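A sketch of retrieving a stored script or search template with `getScript`; the script ID is a placeholder:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

const stored = await client.getScript({ id: 'my-stored-script' })
console.log(stored.found, stored.script)
```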
@@ -35,18 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  get_script_context: {
    path: [],
    body: [],
    query: []
  }
}
interface That { transport: Transport }

/**
* Get script contexts. Get a list of supported script contexts and their methods.
@@ -56,10 +45,7 @@ export default async function GetScriptContextApi (this: That, params?: T.GetScr
export default async function GetScriptContextApi (this: That, params?: T.GetScriptContextRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.GetScriptContextResponse, unknown>>
export default async function GetScriptContextApi (this: That, params?: T.GetScriptContextRequest, options?: TransportRequestOptions): Promise<T.GetScriptContextResponse>
export default async function GetScriptContextApi (this: That, params?: T.GetScriptContextRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath
  } = acceptedParams.get_script_context

  const acceptedPath: string[] = []
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

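A sketch of listing script contexts with `getScriptContext`; the `contexts` field name follows the REST response:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Each entry describes a context and the methods it exposes to scripts
const response = await client.getScriptContext()
console.log(response.contexts)
```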
@@ -35,18 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  get_script_languages: {
    path: [],
    body: [],
    query: []
  }
}
interface That { transport: Transport }

/**
* Get script languages. Get a list of available script types, languages, and contexts.
@@ -56,10 +45,7 @@ export default async function GetScriptLanguagesApi (this: That, params?: T.GetS
export default async function GetScriptLanguagesApi (this: That, params?: T.GetScriptLanguagesRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.GetScriptLanguagesResponse, unknown>>
export default async function GetScriptLanguagesApi (this: That, params?: T.GetScriptLanguagesRequest, options?: TransportRequestOptions): Promise<T.GetScriptLanguagesResponse>
export default async function GetScriptLanguagesApi (this: That, params?: T.GetScriptLanguagesRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath
  } = acceptedParams.get_script_languages

  const acceptedPath: string[] = []
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

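And a sketch of `getScriptLanguages`; the `types_allowed` and `language_contexts` names mirror the REST response fields:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

const response = await client.getScriptLanguages()
console.log(response.types_allowed, response.language_contexts)
```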
@@ -35,32 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  get_source: {
    path: ['id', 'index'],
    body: [],
    query: ['preference', 'realtime', 'refresh', 'routing', '_source', '_source_excludes', '_source_includes', 'stored_fields', 'version', 'version_type']
  }
}
interface That { transport: Transport }

/**
* Get a document's source. Get the source of a document. For example: ``` GET my-index-000001/_source/1 ``` You can use the source filtering parameters to control which parts of the `_source` are returned: ``` GET my-index-000001/_source/1/?_source_includes=*.id&_source_excludes=entities ```
@@ -70,10 +45,7 @@ export default async function GetSourceApi<TDocument = unknown> (this: That, par
export default async function GetSourceApi<TDocument = unknown> (this: That, params: T.GetSourceRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.GetSourceResponse<TDocument>, unknown>>
export default async function GetSourceApi<TDocument = unknown> (this: That, params: T.GetSourceRequest, options?: TransportRequestOptions): Promise<T.GetSourceResponse<TDocument>>
export default async function GetSourceApi<TDocument = unknown> (this: That, params: T.GetSourceRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath
  } = acceptedParams.get_source

  const acceptedPath: string[] = ['id', 'index']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

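A sketch of fetching only a document's source with `getSource`, including the source-filtering parameters mentioned in the doc comment; the index and ID are placeholders:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

// Equivalent to GET my-index-000001/_source/1?_source_includes=*.id&_source_excludes=entities
const source = await client.getSource({
  index: 'my-index-000001',
  id: '1',
  _source_includes: '*.id',
  _source_excludes: 'entities'
})
console.log(source)
```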
@@ -35,36 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Graph {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'graph.explore': { path: ['index'], body: ['connections', 'controls', 'query', 'vertices'], query: ['routing', 'timeout'] }
    }
  }

/**
@@ -75,12 +51,8 @@ export default class Graph {
  async explore (this: That, params: T.GraphExploreRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.GraphExploreResponse, unknown>>
  async explore (this: That, params: T.GraphExploreRequest, options?: TransportRequestOptions): Promise<T.GraphExploreResponse>
  async explore (this: That, params: T.GraphExploreRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath,
      body: acceptedBody,
      query: acceptedQuery
    } = this.acceptedParams['graph.explore']

    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['connections', 'controls', 'query', 'vertices']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -102,14 +74,8 @@ export default class Graph {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }

@@ -35,24 +35,7 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
  health_report: {
    path: ['feature'],
    body: [],
    query: ['timeout', 'verbose', 'size']
  }
}
interface That { transport: Transport }

/**
* Get the cluster health. Get a report with the health status of an Elasticsearch cluster. The report contains a list of indicators that compose Elasticsearch functionality. Each indicator has a health status of: green, unknown, yellow or red. The indicator will provide an explanation and metadata describing the reason for its current health status. The cluster’s status is controlled by the worst indicator status. In the event that an indicator’s status is non-green, a list of impacts may be present in the indicator result which detail the functionalities that are negatively affected by the health issue. Each impact carries with it a severity level, an area of the system that is affected, and a simple description of the impact on the system. Some health indicators can determine the root cause of a health problem and prescribe a set of steps that can be performed in order to improve the health of the system. The root cause and remediation steps are encapsulated in a diagnosis. A diagnosis contains a cause detailing a root cause analysis, an action containing a brief description of the steps to take to fix the problem, the list of affected resources (if applicable), and a detailed step-by-step troubleshooting guide to fix the diagnosed problem. NOTE: The health indicators perform root cause analysis of non-green health statuses. This can be computationally expensive when called frequently. When setting up automated polling of the API for health status, set verbose to false to disable the more expensive analysis logic.
@@ -62,10 +45,7 @@ export default async function HealthReportApi (this: That, params?: T.HealthRepo
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.HealthReportResponse, unknown>>
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest, options?: TransportRequestOptions): Promise<T.HealthReportResponse>
export default async function HealthReportApi (this: That, params?: T.HealthReportRequest, options?: TransportRequestOptions): Promise<any> {
  const {
    path: acceptedPath
  } = acceptedParams.health_report

  const acceptedPath: string[] = ['feature']
  const userQuery = params?.querystring
  const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

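A sketch of calling `healthReport`; as the doc comment notes, `verbose: false` keeps automated polling cheap. The specific indicator names in the comment are illustrative:

```ts
import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' })

const report = await client.healthReport({ verbose: false })
// Overall status plus one entry per indicator (for example shard availability or disk)
console.log(report.status, Object.keys(report.indicators))
```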
@@ -35,120 +35,12 @@ import {
  TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Ilm {
  transport: Transport
  acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
  constructor (transport: Transport) {
    this.transport = transport
    this.acceptedParams = {
      'ilm.delete_lifecycle': { path: ['name'], body: [], query: ['master_timeout', 'timeout'] },
      'ilm.explain_lifecycle': { path: ['index'], body: [], query: ['only_errors', 'only_managed', 'master_timeout'] },
      'ilm.get_lifecycle': { path: ['name'], body: [], query: ['master_timeout', 'timeout'] },
      'ilm.get_status': { path: [], body: [], query: [] },
      'ilm.migrate_to_data_tiers': { path: [], body: ['legacy_template_to_delete', 'node_attribute'], query: ['dry_run', 'master_timeout'] },
      'ilm.move_to_step': { path: ['index'], body: ['current_step', 'next_step'], query: [] },
      'ilm.put_lifecycle': { path: ['name'], body: ['policy'], query: ['master_timeout', 'timeout'] },
      'ilm.remove_policy': { path: ['index'], body: [], query: [] },
      'ilm.retry': { path: ['index'], body: [], query: [] },
      'ilm.start': { path: [], body: [], query: ['master_timeout', 'timeout'] },
      'ilm.stop': { path: [], body: [], query: ['master_timeout', 'timeout'] }
    }
  }

/**
@@ -159,10 +51,7 @@ export default class Ilm {
  async deleteLifecycle (this: That, params: T.IlmDeleteLifecycleRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmDeleteLifecycleResponse, unknown>>
  async deleteLifecycle (this: That, params: T.IlmDeleteLifecycleRequest, options?: TransportRequestOptions): Promise<T.IlmDeleteLifecycleResponse>
  async deleteLifecycle (this: That, params: T.IlmDeleteLifecycleRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['ilm.delete_lifecycle']

    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -204,10 +93,7 @@ export default class Ilm {
  async explainLifecycle (this: That, params: T.IlmExplainLifecycleRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmExplainLifecycleResponse, unknown>>
  async explainLifecycle (this: That, params: T.IlmExplainLifecycleRequest, options?: TransportRequestOptions): Promise<T.IlmExplainLifecycleResponse>
  async explainLifecycle (this: That, params: T.IlmExplainLifecycleRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['ilm.explain_lifecycle']

    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -249,10 +135,7 @@ export default class Ilm {
  async getLifecycle (this: That, params?: T.IlmGetLifecycleRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmGetLifecycleResponse, unknown>>
  async getLifecycle (this: That, params?: T.IlmGetLifecycleRequest, options?: TransportRequestOptions): Promise<T.IlmGetLifecycleResponse>
  async getLifecycle (this: That, params?: T.IlmGetLifecycleRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['ilm.get_lifecycle']

    const acceptedPath: string[] = ['name']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -302,10 +185,7 @@ export default class Ilm {
  async getStatus (this: That, params?: T.IlmGetStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmGetStatusResponse, unknown>>
  async getStatus (this: That, params?: T.IlmGetStatusRequest, options?: TransportRequestOptions): Promise<T.IlmGetStatusResponse>
  async getStatus (this: That, params?: T.IlmGetStatusRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['ilm.get_status']

    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -345,12 +225,8 @@ export default class Ilm {
  async migrateToDataTiers (this: That, params?: T.IlmMigrateToDataTiersRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmMigrateToDataTiersResponse, unknown>>
  async migrateToDataTiers (this: That, params?: T.IlmMigrateToDataTiersRequest, options?: TransportRequestOptions): Promise<T.IlmMigrateToDataTiersResponse>
  async migrateToDataTiers (this: That, params?: T.IlmMigrateToDataTiersRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath,
      body: acceptedBody,
      query: acceptedQuery
    } = this.acceptedParams['ilm.migrate_to_data_tiers']

    const acceptedPath: string[] = []
    const acceptedBody: string[] = ['legacy_template_to_delete', 'node_attribute']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -373,14 +249,8 @@ export default class Ilm {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }

@@ -400,12 +270,8 @@ export default class Ilm {
  async moveToStep (this: That, params: T.IlmMoveToStepRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmMoveToStepResponse, unknown>>
  async moveToStep (this: That, params: T.IlmMoveToStepRequest, options?: TransportRequestOptions): Promise<T.IlmMoveToStepResponse>
  async moveToStep (this: That, params: T.IlmMoveToStepRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath,
      body: acceptedBody,
      query: acceptedQuery
    } = this.acceptedParams['ilm.move_to_step']

    const acceptedPath: string[] = ['index']
    const acceptedBody: string[] = ['current_step', 'next_step']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -427,14 +293,8 @@ export default class Ilm {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }

@@ -457,12 +317,8 @@ export default class Ilm {
  async putLifecycle (this: That, params: T.IlmPutLifecycleRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmPutLifecycleResponse, unknown>>
  async putLifecycle (this: That, params: T.IlmPutLifecycleRequest, options?: TransportRequestOptions): Promise<T.IlmPutLifecycleResponse>
  async putLifecycle (this: That, params: T.IlmPutLifecycleRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath,
      body: acceptedBody,
      query: acceptedQuery
    } = this.acceptedParams['ilm.put_lifecycle']

    const acceptedPath: string[] = ['name']
    const acceptedBody: string[] = ['policy']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -484,14 +340,8 @@ export default class Ilm {
      } else if (acceptedPath.includes(key)) {
        continue
      } else if (key !== 'body' && key !== 'querystring') {
        if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
          // @ts-expect-error
          querystring[key] = params[key]
        } else {
          body = body ?? {}
          // @ts-expect-error
          body[key] = params[key]
        }
        // @ts-expect-error
        querystring[key] = params[key]
      }
    }

@@ -514,10 +364,7 @@ export default class Ilm {
  async removePolicy (this: That, params: T.IlmRemovePolicyRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmRemovePolicyResponse, unknown>>
  async removePolicy (this: That, params: T.IlmRemovePolicyRequest, options?: TransportRequestOptions): Promise<T.IlmRemovePolicyResponse>
  async removePolicy (this: That, params: T.IlmRemovePolicyRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['ilm.remove_policy']

    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -559,10 +406,7 @@ export default class Ilm {
  async retry (this: That, params: T.IlmRetryRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmRetryResponse, unknown>>
  async retry (this: That, params: T.IlmRetryRequest, options?: TransportRequestOptions): Promise<T.IlmRetryResponse>
  async retry (this: That, params: T.IlmRetryRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['ilm.retry']

    const acceptedPath: string[] = ['index']
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -604,10 +448,7 @@ export default class Ilm {
  async start (this: That, params?: T.IlmStartRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmStartResponse, unknown>>
  async start (this: That, params?: T.IlmStartRequest, options?: TransportRequestOptions): Promise<T.IlmStartResponse>
  async start (this: That, params?: T.IlmStartRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['ilm.start']

    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@@ -647,10 +488,7 @@ export default class Ilm {
  async stop (this: That, params?: T.IlmStopRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IlmStopResponse, unknown>>
  async stop (this: That, params?: T.IlmStopRequest, options?: TransportRequestOptions): Promise<T.IlmStopResponse>
  async stop (this: That, params?: T.IlmStopRequest, options?: TransportRequestOptions): Promise<any> {
    const {
      path: acceptedPath
    } = this.acceptedParams['ilm.stop']

    const acceptedPath: string[] = []
    const userQuery = params?.querystring
    const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@ -35,103 +35,12 @@ import {
|
||||
TransportResult
|
||||
} from '@elastic/transport'
|
||||
import * as T from '../types'
|
||||
|
||||
interface That {
|
||||
transport: Transport
|
||||
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
|
||||
}
|
||||
|
||||
const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
|
||||
interface That { transport: Transport }
|
||||
|
||||
export default class Inference {
|
||||
transport: Transport
|
||||
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
|
||||
constructor (transport: Transport) {
|
||||
this.transport = transport
|
||||
this.acceptedParams = {
|
||||
'inference.delete': {
|
||||
path: [
|
||||
'task_type',
|
||||
'inference_id'
|
||||
],
|
||||
body: [],
|
||||
query: [
|
||||
'dry_run',
|
||||
'force'
|
||||
]
|
||||
},
|
||||
'inference.get': {
|
||||
path: [
|
||||
'task_type',
|
||||
'inference_id'
|
||||
],
|
||||
body: [],
|
||||
query: []
|
||||
},
|
||||
'inference.inference': {
|
||||
path: [
|
||||
'task_type',
|
||||
'inference_id'
|
||||
],
|
||||
body: [
|
||||
'query',
|
||||
'input',
|
||||
'task_settings'
|
||||
],
|
||||
query: [
|
||||
'timeout'
|
||||
]
|
||||
},
|
||||
'inference.put': {
|
||||
path: [
|
||||
'task_type',
|
||||
'inference_id'
|
||||
],
|
||||
body: [
|
||||
'inference_config'
|
||||
],
|
||||
query: []
|
||||
},
|
||||
'inference.stream_inference': {
|
||||
path: [
|
||||
'inference_id',
|
||||
'task_type'
|
||||
],
|
||||
body: [
|
||||
'input'
|
||||
],
|
||||
query: []
|
||||
},
|
||||
'inference.unified_inference': {
|
||||
path: [
|
||||
'task_type',
|
||||
'inference_id'
|
||||
],
|
||||
body: [
|
||||
'messages',
|
||||
'model',
|
||||
'max_completion_tokens',
|
||||
'stop',
|
||||
'temperature',
|
||||
'tool_choice',
|
||||
'tools',
|
||||
'top_p'
|
||||
],
|
||||
query: [
|
||||
'timeout'
|
||||
]
|
||||
},
|
||||
'inference.update': {
|
||||
path: [
|
||||
'inference_id',
|
||||
'task_type'
|
||||
],
|
||||
body: [
|
||||
'inference_config'
|
||||
],
|
||||
query: []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -142,10 +51,7 @@ export default class Inference {
|
||||
async delete (this: That, params: T.InferenceDeleteRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.InferenceDeleteResponse, unknown>>
|
||||
async delete (this: That, params: T.InferenceDeleteRequest, options?: TransportRequestOptions): Promise<T.InferenceDeleteResponse>
|
||||
async delete (this: That, params: T.InferenceDeleteRequest, options?: TransportRequestOptions): Promise<any> {
|
||||
const {
|
||||
path: acceptedPath
|
||||
} = this.acceptedParams['inference.delete']
|
||||
|
||||
const acceptedPath: string[] = ['task_type', 'inference_id']
|
||||
const userQuery = params?.querystring
|
||||
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
|
||||
|
||||
@ -195,10 +101,7 @@ export default class Inference {
|
||||
async get (this: That, params?: T.InferenceGetRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.InferenceGetResponse, unknown>>
|
||||
async get (this: That, params?: T.InferenceGetRequest, options?: TransportRequestOptions): Promise<T.InferenceGetResponse>
|
||||
async get (this: That, params?: T.InferenceGetRequest, options?: TransportRequestOptions): Promise<any> {
|
||||
const {
|
||||
path: acceptedPath
|
||||
} = this.acceptedParams['inference.get']
|
||||
|
||||
const acceptedPath: string[] = ['task_type', 'inference_id']
|
||||
const userQuery = params?.querystring
|
||||
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
|
||||
|
||||
@ -252,12 +155,8 @@ export default class Inference {
|
||||
async inference (this: That, params: T.InferenceInferenceRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.InferenceInferenceResponse, unknown>>
|
||||
async inference (this: That, params: T.InferenceInferenceRequest, options?: TransportRequestOptions): Promise<T.InferenceInferenceResponse>
|
||||
async inference (this: That, params: T.InferenceInferenceRequest, options?: TransportRequestOptions): Promise<any> {
|
||||
const {
|
||||
path: acceptedPath,
|
||||
body: acceptedBody,
|
||||
query: acceptedQuery
|
||||
} = this.acceptedParams['inference.inference']
|
||||
|
||||
const acceptedPath: string[] = ['task_type', 'inference_id']
|
||||
const acceptedBody: string[] = ['query', 'input', 'task_settings']
|
||||
const userQuery = params?.querystring
|
||||
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}
|
||||
|
||||
@ -279,14 +178,8 @@ export default class Inference {
|
||||
} else if (acceptedPath.includes(key)) {
|
||||
continue
|
||||
} else if (key !== 'body' && key !== 'querystring') {
|
||||
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
|
||||
// @ts-expect-error
|
||||
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

@ -317,12 +210,8 @@ export default class Inference {
async put (this: That, params: T.InferencePutRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.InferencePutResponse, unknown>>
async put (this: That, params: T.InferencePutRequest, options?: TransportRequestOptions): Promise<T.InferencePutResponse>
async put (this: That, params: T.InferencePutRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['inference.put']

const acceptedPath: string[] = ['task_type', 'inference_id']
const acceptedBody: string[] = ['inference_config']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -334,14 +223,8 @@ export default class Inference {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

@ -372,12 +255,8 @@ export default class Inference {
async streamInference (this: That, params: T.InferenceStreamInferenceRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.InferenceStreamInferenceResponse, unknown>>
async streamInference (this: That, params: T.InferenceStreamInferenceRequest, options?: TransportRequestOptions): Promise<T.InferenceStreamInferenceResponse>
async streamInference (this: That, params: T.InferenceStreamInferenceRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['inference.stream_inference']

const acceptedPath: string[] = ['inference_id', 'task_type']
const acceptedBody: string[] = ['input']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -399,14 +278,8 @@ export default class Inference {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

@ -437,12 +310,8 @@ export default class Inference {
async unifiedInference (this: That, params: T.InferenceUnifiedInferenceRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.InferenceUnifiedInferenceResponse, unknown>>
async unifiedInference (this: That, params: T.InferenceUnifiedInferenceRequest, options?: TransportRequestOptions): Promise<T.InferenceUnifiedInferenceResponse>
async unifiedInference (this: That, params: T.InferenceUnifiedInferenceRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['inference.unified_inference']

const acceptedPath: string[] = ['task_type', 'inference_id']
const acceptedBody: string[] = ['messages', 'model', 'max_completion_tokens', 'stop', 'temperature', 'tool_choice', 'tools', 'top_p']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -464,14 +333,8 @@ export default class Inference {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

@ -502,12 +365,8 @@ export default class Inference {
async update (this: That, params: T.InferenceUpdateRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.InferenceUpdateResponse, unknown>>
async update (this: That, params: T.InferenceUpdateRequest, options?: TransportRequestOptions): Promise<T.InferenceUpdateResponse>
async update (this: That, params: T.InferenceUpdateRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['inference.update']

const acceptedPath: string[] = ['inference_id', 'task_type']
const acceptedBody: string[] = ['inference_config']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -519,14 +378,8 @@ export default class Inference {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

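The hunks above all follow the same pattern: each generated method now reads its accepted path, body, and query keys from a shared acceptedParams table and sorts any remaining request properties into either the querystring or the request body. A minimal standalone sketch of that routing idea follows; the names (EndpointSpec, splitParams) and the sample call are illustrative, not the client's internal API.

// Illustrative sketch of the key-routing logic shown in the diff above.
const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

interface EndpointSpec { path: string[], body: string[], query: string[] }

function splitParams (
  spec: EndpointSpec,
  params: Record<string, unknown>
): { body: Record<string, unknown>, querystring: Record<string, unknown> } {
  const body: Record<string, unknown> = {}
  const querystring: Record<string, unknown> = {}
  for (const key of Object.keys(params)) {
    if (spec.path.includes(key)) continue // path params become part of the URL
    if (spec.query.includes(key) || commonQueryParams.includes(key)) {
      querystring[key] = params[key]      // declared (or common) query parameter
    } else {
      body[key] = params[key]             // everything else is sent in the request body
    }
  }
  return { body, querystring }
}

// Example: route the params of a hypothetical inference.put call.
const { body, querystring } = splitParams(
  { path: ['task_type', 'inference_id'], body: ['inference_config'], query: [] },
  { task_type: 'completion', inference_id: 'my-endpoint', inference_config: {}, pretty: true }
)
console.log(querystring) // { pretty: true }
console.log(body)        // { inference_config: {} }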
@ -35,18 +35,7 @@ import {
TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
transport: Transport
}

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
info: {
path: [],
body: [],
query: []
}
}
interface That { transport: Transport }

/**
* Get cluster info. Get basic build, version, and cluster information.
@ -56,10 +45,7 @@ export default async function InfoApi (this: That, params?: T.InfoRequest, optio
export default async function InfoApi (this: That, params?: T.InfoRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.InfoResponse, unknown>>
export default async function InfoApi (this: That, params?: T.InfoRequest, options?: TransportRequestOptions): Promise<T.InfoResponse>
export default async function InfoApi (this: That, params?: T.InfoRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = acceptedParams.info

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

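For reference, the info API touched above takes no arguments from the caller's point of view. A minimal usage sketch, with a placeholder endpoint and API key:

import { Client } from '@elastic/elasticsearch'

// Placeholder connection details; substitute your own endpoint and credentials.
const client = new Client({
  node: 'http://localhost:9200',
  auth: { apiKey: 'base64EncodedKey' }
})

// Returns basic build, version, and cluster information.
const info = await client.info()
console.log(info.version.number)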
@ -35,142 +35,12 @@ import {
TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
transport: Transport
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Ingest {
transport: Transport
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
constructor (transport: Transport) {
this.transport = transport
this.acceptedParams = {
'ingest.delete_geoip_database': {
path: [
'id'
],
body: [],
query: [
'master_timeout',
'timeout'
]
},
'ingest.delete_ip_location_database': {
path: [
'id'
],
body: [],
query: [
'master_timeout',
'timeout'
]
},
'ingest.delete_pipeline': {
path: [
'id'
],
body: [],
query: [
'master_timeout',
'timeout'
]
},
'ingest.geo_ip_stats': {
path: [],
body: [],
query: []
},
'ingest.get_geoip_database': {
path: [
'id'
],
body: [],
query: []
},
'ingest.get_ip_location_database': {
path: [
'id'
],
body: [],
query: [
'master_timeout'
]
},
'ingest.get_pipeline': {
path: [
'id'
],
body: [],
query: [
'master_timeout',
'summary'
]
},
'ingest.processor_grok': {
path: [],
body: [],
query: []
},
'ingest.put_geoip_database': {
path: [
'id'
],
body: [
'name',
'maxmind'
],
query: [
'master_timeout',
'timeout'
]
},
'ingest.put_ip_location_database': {
path: [
'id'
],
body: [
'configuration'
],
query: [
'master_timeout',
'timeout'
]
},
'ingest.put_pipeline': {
path: [
'id'
],
body: [
'_meta',
'description',
'on_failure',
'processors',
'version',
'deprecated'
],
query: [
'master_timeout',
'timeout',
'if_version'
]
},
'ingest.simulate': {
path: [
'id'
],
body: [
'docs',
'pipeline'
],
query: [
'verbose'
]
}
}
}

/**
@ -181,10 +51,7 @@ export default class Ingest {
async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestDeleteGeoipDatabaseResponse, unknown>>
async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestDeleteGeoipDatabaseResponse>
async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['ingest.delete_geoip_database']

const acceptedPath: string[] = ['id']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -226,10 +93,7 @@ export default class Ingest {
async deleteIpLocationDatabase (this: That, params: T.IngestDeleteIpLocationDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestDeleteIpLocationDatabaseResponse, unknown>>
async deleteIpLocationDatabase (this: That, params: T.IngestDeleteIpLocationDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestDeleteIpLocationDatabaseResponse>
async deleteIpLocationDatabase (this: That, params: T.IngestDeleteIpLocationDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['ingest.delete_ip_location_database']

const acceptedPath: string[] = ['id']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -271,10 +135,7 @@ export default class Ingest {
async deletePipeline (this: That, params: T.IngestDeletePipelineRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestDeletePipelineResponse, unknown>>
async deletePipeline (this: That, params: T.IngestDeletePipelineRequest, options?: TransportRequestOptions): Promise<T.IngestDeletePipelineResponse>
async deletePipeline (this: That, params: T.IngestDeletePipelineRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['ingest.delete_pipeline']

const acceptedPath: string[] = ['id']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -316,10 +177,7 @@ export default class Ingest {
async geoIpStats (this: That, params?: T.IngestGeoIpStatsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestGeoIpStatsResponse, unknown>>
async geoIpStats (this: That, params?: T.IngestGeoIpStatsRequest, options?: TransportRequestOptions): Promise<T.IngestGeoIpStatsResponse>
async geoIpStats (this: That, params?: T.IngestGeoIpStatsRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['ingest.geo_ip_stats']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -359,10 +217,7 @@ export default class Ingest {
async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestGetGeoipDatabaseResponse, unknown>>
async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestGetGeoipDatabaseResponse>
async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['ingest.get_geoip_database']

const acceptedPath: string[] = ['id']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -412,10 +267,7 @@ export default class Ingest {
async getIpLocationDatabase (this: That, params?: T.IngestGetIpLocationDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestGetIpLocationDatabaseResponse, unknown>>
async getIpLocationDatabase (this: That, params?: T.IngestGetIpLocationDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestGetIpLocationDatabaseResponse>
async getIpLocationDatabase (this: That, params?: T.IngestGetIpLocationDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['ingest.get_ip_location_database']

const acceptedPath: string[] = ['id']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -465,10 +317,7 @@ export default class Ingest {
async getPipeline (this: That, params?: T.IngestGetPipelineRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestGetPipelineResponse, unknown>>
async getPipeline (this: That, params?: T.IngestGetPipelineRequest, options?: TransportRequestOptions): Promise<T.IngestGetPipelineResponse>
async getPipeline (this: That, params?: T.IngestGetPipelineRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['ingest.get_pipeline']

const acceptedPath: string[] = ['id']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -518,10 +367,7 @@ export default class Ingest {
async processorGrok (this: That, params?: T.IngestProcessorGrokRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestProcessorGrokResponse, unknown>>
async processorGrok (this: That, params?: T.IngestProcessorGrokRequest, options?: TransportRequestOptions): Promise<T.IngestProcessorGrokResponse>
async processorGrok (this: That, params?: T.IngestProcessorGrokRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['ingest.processor_grok']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -561,12 +407,8 @@ export default class Ingest {
async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestPutGeoipDatabaseResponse, unknown>>
async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestPutGeoipDatabaseResponse>
async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['ingest.put_geoip_database']

const acceptedPath: string[] = ['id']
const acceptedBody: string[] = ['name', 'maxmind']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -588,14 +430,8 @@ export default class Ingest {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

@ -618,12 +454,8 @@ export default class Ingest {
async putIpLocationDatabase (this: That, params: T.IngestPutIpLocationDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestPutIpLocationDatabaseResponse, unknown>>
async putIpLocationDatabase (this: That, params: T.IngestPutIpLocationDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestPutIpLocationDatabaseResponse>
async putIpLocationDatabase (this: That, params: T.IngestPutIpLocationDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['ingest.put_ip_location_database']

const acceptedPath: string[] = ['id']
const acceptedBody: string[] = ['configuration']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -635,14 +467,8 @@ export default class Ingest {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

@ -665,12 +491,8 @@ export default class Ingest {
async putPipeline (this: That, params: T.IngestPutPipelineRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestPutPipelineResponse, unknown>>
async putPipeline (this: That, params: T.IngestPutPipelineRequest, options?: TransportRequestOptions): Promise<T.IngestPutPipelineResponse>
async putPipeline (this: That, params: T.IngestPutPipelineRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['ingest.put_pipeline']

const acceptedPath: string[] = ['id']
const acceptedBody: string[] = ['_meta', 'description', 'on_failure', 'processors', 'version', 'deprecated']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -692,14 +514,8 @@ export default class Ingest {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

@ -722,12 +538,8 @@ export default class Ingest {
async simulate (this: That, params: T.IngestSimulateRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestSimulateResponse, unknown>>
async simulate (this: That, params: T.IngestSimulateRequest, options?: TransportRequestOptions): Promise<T.IngestSimulateResponse>
async simulate (this: That, params: T.IngestSimulateRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['ingest.simulate']

const acceptedPath: string[] = ['id']
const acceptedBody: string[] = ['docs', 'pipeline']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -749,14 +561,8 @@ export default class Ingest {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

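A hedged usage sketch for the ingest APIs touched above; the pipeline id, processor, and sample document are examples, not values taken from this diff:

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // placeholder endpoint

// Create (or replace) an ingest pipeline; `processors` is one of the accepted body keys,
// while parameters such as `master_timeout` would be routed to the querystring.
await client.ingest.putPipeline({
  id: 'my-pipeline',
  description: 'example pipeline',
  processors: [
    { set: { field: 'ingested_at', value: '{{_ingest.timestamp}}' } }
  ]
})

// Dry-run a document through the pipeline without indexing it.
const result = await client.ingest.simulate({
  id: 'my-pipeline',
  docs: [{ _source: { message: 'hello' } }]
})
console.log(result.docs.length)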
@ -35,31 +35,7 @@ import {
TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
transport: Transport
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
knn_search: {
path: [
'index'
],
body: [
'_source',
'docvalue_fields',
'stored_fields',
'fields',
'filter',
'knn'
],
query: [
'routing'
]
}
}
interface That { transport: Transport }

/**
* Run a knn search. NOTE: The kNN search API has been replaced by the `knn` option in the search API. Perform a k-nearest neighbor (kNN) search on a dense_vector field and return the matching documents. Given a query vector, the API finds the k closest vectors and returns those documents as search hits. Elasticsearch uses the HNSW algorithm to support efficient kNN search. Like most kNN algorithms, HNSW is an approximate method that sacrifices result accuracy for improved search speed. This means the results returned are not always the true k closest neighbors. The kNN search API supports restricting the search using a filter. The search will return the top k documents that also match the filter query. A kNN search response has the exact same structure as a search API response. However, certain sections have a meaning specific to kNN search: * The document `_score` is determined by the similarity between the query and document vector. * The `hits.total` object contains the total number of nearest neighbor candidates considered, which is `num_candidates * num_shards`. The `hits.total.relation` will always be `eq`, indicating an exact value.
@ -69,12 +45,8 @@ export default async function KnnSearchApi<TDocument = unknown> (this: That, par
export default async function KnnSearchApi<TDocument = unknown> (this: That, params: T.KnnSearchRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.KnnSearchResponse<TDocument>, unknown>>
export default async function KnnSearchApi<TDocument = unknown> (this: That, params: T.KnnSearchRequest, options?: TransportRequestOptions): Promise<T.KnnSearchResponse<TDocument>>
export default async function KnnSearchApi<TDocument = unknown> (this: That, params: T.KnnSearchRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = acceptedParams.knn_search

const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['_source', 'docvalue_fields', 'stored_fields', 'fields', 'filter', 'knn']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -96,14 +68,8 @@ export default async function KnnSearchApi<TDocument = unknown> (this: That, par
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

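As the doc comment above notes, the dedicated kNN search API has been superseded by the `knn` option of the regular search API. A hedged sketch of that newer form; the index name, dense_vector field name, and query vector are made up:

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // placeholder endpoint

// Approximate kNN via the search API's top-level `knn` option.
const response = await client.search({
  index: 'my-vectors',              // hypothetical index with a dense_vector field
  knn: {
    field: 'embedding',             // hypothetical dense_vector field name
    query_vector: [0.1, 0.2, 0.3],  // example query vector
    k: 10,
    num_candidates: 100
  },
  _source: ['title']
})
console.log(response.hits.hits.map(hit => hit._score))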
@ -35,77 +35,12 @@ import {
TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
transport: Transport
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class License {
transport: Transport
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
constructor (transport: Transport) {
this.transport = transport
this.acceptedParams = {
'license.delete': {
path: [],
body: [],
query: [
'master_timeout',
'timeout'
]
},
'license.get': {
path: [],
body: [],
query: [
'accept_enterprise',
'local'
]
},
'license.get_basic_status': {
path: [],
body: [],
query: []
},
'license.get_trial_status': {
path: [],
body: [],
query: []
},
'license.post': {
path: [],
body: [
'license',
'licenses'
],
query: [
'acknowledge',
'master_timeout',
'timeout'
]
},
'license.post_start_basic': {
path: [],
body: [],
query: [
'acknowledge',
'master_timeout',
'timeout'
]
},
'license.post_start_trial': {
path: [],
body: [],
query: [
'acknowledge',
'type_query_string',
'master_timeout'
]
}
}
}

/**
@ -116,10 +51,7 @@ export default class License {
async delete (this: That, params?: T.LicenseDeleteRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicenseDeleteResponse, unknown>>
async delete (this: That, params?: T.LicenseDeleteRequest, options?: TransportRequestOptions): Promise<T.LicenseDeleteResponse>
async delete (this: That, params?: T.LicenseDeleteRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['license.delete']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -159,10 +91,7 @@ export default class License {
async get (this: That, params?: T.LicenseGetRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicenseGetResponse, unknown>>
async get (this: That, params?: T.LicenseGetRequest, options?: TransportRequestOptions): Promise<T.LicenseGetResponse>
async get (this: That, params?: T.LicenseGetRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['license.get']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -202,10 +131,7 @@ export default class License {
async getBasicStatus (this: That, params?: T.LicenseGetBasicStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicenseGetBasicStatusResponse, unknown>>
async getBasicStatus (this: That, params?: T.LicenseGetBasicStatusRequest, options?: TransportRequestOptions): Promise<T.LicenseGetBasicStatusResponse>
async getBasicStatus (this: That, params?: T.LicenseGetBasicStatusRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['license.get_basic_status']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -245,10 +171,7 @@ export default class License {
async getTrialStatus (this: That, params?: T.LicenseGetTrialStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicenseGetTrialStatusResponse, unknown>>
async getTrialStatus (this: That, params?: T.LicenseGetTrialStatusRequest, options?: TransportRequestOptions): Promise<T.LicenseGetTrialStatusResponse>
async getTrialStatus (this: That, params?: T.LicenseGetTrialStatusRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['license.get_trial_status']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -288,12 +211,8 @@ export default class License {
async post (this: That, params?: T.LicensePostRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicensePostResponse, unknown>>
async post (this: That, params?: T.LicensePostRequest, options?: TransportRequestOptions): Promise<T.LicensePostResponse>
async post (this: That, params?: T.LicensePostRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['license.post']

const acceptedPath: string[] = []
const acceptedBody: string[] = ['license', 'licenses']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -316,14 +235,8 @@ export default class License {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

@ -343,10 +256,7 @@ export default class License {
async postStartBasic (this: That, params?: T.LicensePostStartBasicRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicensePostStartBasicResponse, unknown>>
async postStartBasic (this: That, params?: T.LicensePostStartBasicRequest, options?: TransportRequestOptions): Promise<T.LicensePostStartBasicResponse>
async postStartBasic (this: That, params?: T.LicensePostStartBasicRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['license.post_start_basic']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -386,10 +296,7 @@ export default class License {
async postStartTrial (this: That, params?: T.LicensePostStartTrialRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LicensePostStartTrialResponse, unknown>>
async postStartTrial (this: That, params?: T.LicensePostStartTrialRequest, options?: TransportRequestOptions): Promise<T.LicensePostStartTrialResponse>
async postStartTrial (this: That, params?: T.LicensePostStartTrialRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['license.post_start_trial']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

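A hedged usage sketch for the license APIs above; the `local` and `acknowledge` flags are illustrative values, not defaults:

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // placeholder endpoint

// Read the current license; `local` is one of the accepted query parameters listed above.
const license = await client.license.get({ local: true })
console.log(license.license.type)

// Start a trial; `acknowledge` is routed to the querystring.
await client.license.postStartTrial({ acknowledge: true })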
@ -35,44 +35,12 @@ import {
TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
transport: Transport
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']
interface That { transport: Transport }

export default class Logstash {
transport: Transport
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
constructor (transport: Transport) {
this.transport = transport
this.acceptedParams = {
'logstash.delete_pipeline': {
path: [
'id'
],
body: [],
query: []
},
'logstash.get_pipeline': {
path: [
'id'
],
body: [],
query: []
},
'logstash.put_pipeline': {
path: [
'id'
],
body: [
'pipeline'
],
query: []
}
}
}

/**
@ -83,10 +51,7 @@ export default class Logstash {
async deletePipeline (this: That, params: T.LogstashDeletePipelineRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LogstashDeletePipelineResponse, unknown>>
async deletePipeline (this: That, params: T.LogstashDeletePipelineRequest, options?: TransportRequestOptions): Promise<T.LogstashDeletePipelineResponse>
async deletePipeline (this: That, params: T.LogstashDeletePipelineRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['logstash.delete_pipeline']

const acceptedPath: string[] = ['id']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -128,10 +93,7 @@ export default class Logstash {
async getPipeline (this: That, params?: T.LogstashGetPipelineRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LogstashGetPipelineResponse, unknown>>
async getPipeline (this: That, params?: T.LogstashGetPipelineRequest, options?: TransportRequestOptions): Promise<T.LogstashGetPipelineResponse>
async getPipeline (this: That, params?: T.LogstashGetPipelineRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['logstash.get_pipeline']

const acceptedPath: string[] = ['id']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -181,12 +143,8 @@ export default class Logstash {
async putPipeline (this: That, params: T.LogstashPutPipelineRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.LogstashPutPipelineResponse, unknown>>
async putPipeline (this: That, params: T.LogstashPutPipelineRequest, options?: TransportRequestOptions): Promise<T.LogstashPutPipelineResponse>
async putPipeline (this: That, params: T.LogstashPutPipelineRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = this.acceptedParams['logstash.put_pipeline']

const acceptedPath: string[] = ['id']
const acceptedBody: string[] = ['pipeline']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -198,14 +156,8 @@ export default class Logstash {
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

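A minimal sketch for the Logstash central-management APIs above, using only the `id` path parameter that the accepted-params table declares; the pipeline id is an example:

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // placeholder endpoint

// Fetch a centrally managed Logstash pipeline by id ('my-pipeline' is an example).
const pipelines = await client.logstash.getPipeline({ id: 'my-pipeline' })
console.log(Object.keys(pipelines))

// Remove it again.
await client.logstash.deletePipeline({ id: 'my-pipeline' })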
@ -35,35 +35,7 @@ import {
TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
transport: Transport
}

const commonQueryParams = ['error_trace', 'filter_path', 'human', 'pretty']

const acceptedParams: Record<string, { path: string[], body: string[], query: string[] }> = {
mget: {
path: [
'index'
],
body: [
'docs',
'ids'
],
query: [
'force_synthetic_source',
'preference',
'realtime',
'refresh',
'routing',
'_source',
'_source_excludes',
'_source_includes',
'stored_fields'
]
}
}
interface That { transport: Transport }

/**
* Get multiple documents. Get multiple JSON documents by ID from one or more indices. If you specify an index in the request URI, you only need to specify the document IDs in the request body. To ensure fast responses, this multi get (mget) API responds with partial results if one or more shards fail. **Filter source fields** By default, the `_source` field is returned for every document (if stored). Use the `_source` and `_source_include` or `source_exclude` attributes to filter what fields are returned for a particular document. You can include the `_source`, `_source_includes`, and `_source_excludes` query parameters in the request URI to specify the defaults to use when there are no per-document instructions. **Get stored fields** Use the `stored_fields` attribute to specify the set of stored fields you want to retrieve. Any requested fields that are not stored are ignored. You can include the `stored_fields` query parameter in the request URI to specify the defaults to use when there are no per-document instructions.
@ -73,12 +45,8 @@ export default async function MgetApi<TDocument = unknown> (this: That, params?:
export default async function MgetApi<TDocument = unknown> (this: That, params?: T.MgetRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.MgetResponse<TDocument>, unknown>>
export default async function MgetApi<TDocument = unknown> (this: That, params?: T.MgetRequest, options?: TransportRequestOptions): Promise<T.MgetResponse<TDocument>>
export default async function MgetApi<TDocument = unknown> (this: That, params?: T.MgetRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath,
body: acceptedBody,
query: acceptedQuery
} = acceptedParams.mget

const acceptedPath: string[] = ['index']
const acceptedBody: string[] = ['docs', 'ids']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -101,14 +69,8 @@ export default async function MgetApi<TDocument = unknown> (this: That, params?:
} else if (acceptedPath.includes(key)) {
continue
} else if (key !== 'body' && key !== 'querystring') {
if (acceptedQuery.includes(key) || commonQueryParams.includes(key)) {
// @ts-expect-error
querystring[key] = params[key]
} else {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
}
// @ts-expect-error
querystring[key] = params[key]
}
}

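A hedged usage sketch for the multi get API documented above; the index name, ids, and document shape are examples:

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // placeholder endpoint

interface MyDoc { title: string } // hypothetical document shape

// `ids` is one of the accepted body keys; `_source_includes` is routed to the querystring.
const response = await client.mget<MyDoc>({
  index: 'my-index',
  ids: ['1', '2'],
  _source_includes: ['title']
})

for (const doc of response.docs) {
  // Each item is either a found/not-found document or a per-document error.
  if ('found' in doc && doc.found) console.log(doc._source?.title)
}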
@ -35,36 +35,12 @@ import {
TransportResult
} from '@elastic/transport'
import * as T from '../types'

interface That {
transport: Transport
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
}
interface That { transport: Transport }

export default class Migration {
transport: Transport
acceptedParams: Record<string, { path: string[], body: string[], query: string[] }>
constructor (transport: Transport) {
this.transport = transport
this.acceptedParams = {
'migration.deprecations': {
path: [
'index'
],
body: [],
query: []
},
'migration.get_feature_upgrade_status': {
path: [],
body: [],
query: []
},
'migration.post_feature_upgrade': {
path: [],
body: [],
query: []
}
}
}

/**
@ -75,10 +51,7 @@ export default class Migration {
async deprecations (this: That, params?: T.MigrationDeprecationsRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.MigrationDeprecationsResponse, unknown>>
async deprecations (this: That, params?: T.MigrationDeprecationsRequest, options?: TransportRequestOptions): Promise<T.MigrationDeprecationsResponse>
async deprecations (this: That, params?: T.MigrationDeprecationsRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['migration.deprecations']

const acceptedPath: string[] = ['index']
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -128,10 +101,7 @@ export default class Migration {
async getFeatureUpgradeStatus (this: That, params?: T.MigrationGetFeatureUpgradeStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.MigrationGetFeatureUpgradeStatusResponse, unknown>>
async getFeatureUpgradeStatus (this: That, params?: T.MigrationGetFeatureUpgradeStatusRequest, options?: TransportRequestOptions): Promise<T.MigrationGetFeatureUpgradeStatusResponse>
async getFeatureUpgradeStatus (this: That, params?: T.MigrationGetFeatureUpgradeStatusRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['migration.get_feature_upgrade_status']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

@ -171,10 +141,7 @@ export default class Migration {
async postFeatureUpgrade (this: That, params?: T.MigrationPostFeatureUpgradeRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.MigrationPostFeatureUpgradeResponse, unknown>>
async postFeatureUpgrade (this: That, params?: T.MigrationPostFeatureUpgradeRequest, options?: TransportRequestOptions): Promise<T.MigrationPostFeatureUpgradeResponse>
async postFeatureUpgrade (this: That, params?: T.MigrationPostFeatureUpgradeRequest, options?: TransportRequestOptions): Promise<any> {
const {
path: acceptedPath
} = this.acceptedParams['migration.post_feature_upgrade']

const acceptedPath: string[] = []
const userQuery = params?.querystring
const querystring: Record<string, any> = userQuery != null ? { ...userQuery } : {}

1833
src/api/api/ml.ts
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.