Compare commits
12 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 859ca633be | |
| | b70a54971c | |
| | 5dac169311 | |
| | d44b0ffc39 | |
| | 745919c92d | |
| | 5fcc557a78 | |
| | c68ddbd610 | |
| | 538efe0736 | |
| | 20d7cab748 | |
| | 8d96cb4342 | |
| | f08691cb9e | |
| | 69e4c31eb0 | |
@@ -6,7 +6,7 @@ steps:
   env:
     NODE_VERSION: "{{ matrix.nodejs }}"
    TEST_SUITE: "{{ matrix.suite }}"
-    STACK_VERSION: 8.8.0-SNAPSHOT
+    STACK_VERSION: 8.10.3-SNAPSHOT
   matrix:
     setup:
       suite:
.ci/Dockerfile (new file, +30 lines)
@@ -0,0 +1,30 @@
+ARG NODE_JS_VERSION=${NODE_JS_VERSION:-18}
+FROM node:${NODE_JS_VERSION}
+
+ARG BUILDER_UID=1000
+ARG BUILDER_GID=1000
+ENV BUILDER_USER elastic
+ENV BUILDER_GROUP elastic
+
+# install zip util
+RUN apt-get clean -y && \
+  apt-get update -y && \
+  apt-get install -y zip
+
+# Set user permissions and directory
+RUN (id -g ${BUILDER_GID} || groupadd --system -g ${BUILDER_GID} ${BUILDER_GROUP}) \
+  && (id -u ${BUILDER_UID} || useradd --system --shell /bin/bash -u ${BUILDER_UID} -g ${BUILDER_GID} -m elastic) \
+  && mkdir -p /usr/src/elasticsearch-js \
+  && chown -R ${BUILDER_UID}:${BUILDER_GID} /usr/src/
+
+WORKDIR /usr/src/elasticsearch-js
+
+# run remainder of commands as non-root user
+USER ${BUILDER_UID}:${BUILDER_GID}
+
+# install dependencies
+COPY package.json .
+RUN npm install --production=false
+
+# copy project files
+COPY . .
@@ -28,6 +28,11 @@ import assert from 'assert'
 import { join } from 'desm'
 import semver from 'semver'

+// zx/globals loads minimist-parsed args as a global `argv`, but it
+// interprets args like '8.10' as numbers and shortens them to '8.1'.
+// so we have to import and configure minimist ourselves.
+import minimist from 'minimist'
+const argv = minimist(process.argv.slice(2), { string: ['_', 'task'] })
 assert(typeof argv.task === 'string', 'Missing task parameter')

 switch (argv.task) {
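As an aside, a minimal sketch of the minimist behaviour the comment above describes, with hypothetical arguments:

import minimist from 'minimist'

// Without the `string` option, minimist coerces numeric-looking args,
// so a positional '8.10' becomes the number 8.1.
const loose = minimist(['--task', 'codegen', '8.10'])
console.log(loose._[0]) // 8.1

// Declaring '_' and 'task' as strings keeps them verbatim.
const strict = minimist(['--task', 'codegen', '8.10'], { string: ['_', 'task'] })
console.log(strict._[0]) // '8.10'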
.ci/make.sh (44 lines changed)
@@ -131,7 +131,7 @@ esac
 echo -e "\033[34;1mINFO: building $product container\033[0m"

 docker build \
-  --file .buildkite/Dockerfile \
+  --file .ci/Dockerfile \
   --tag "$product" \
   --build-arg NODE_JS_VERSION="$NODE_JS_VERSION" \
   --build-arg "BUILDER_UID=$(id -u)" \
@@ -144,19 +144,35 @@ docker build \

 echo -e "\033[34;1mINFO: running $product container\033[0m"

-docker run \
-  --volume "$repo:/usr/src/elasticsearch-js" \
-  --volume /usr/src/elasticsearch-js/node_modules \
-  -u "$(id -u):$(id -g)" \
-  --env "WORKFLOW=$WORKFLOW" \
-  --name make-elasticsearch-js \
-  --rm \
-  $product \
-  /bin/bash -c "cd /usr/src && \
-    git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
-    mkdir -p /usr/src/elastic-client-generator-js/output && \
-    cd /usr/src/elasticsearch-js && \
-    node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
+if [[ -z "${BUILDKITE+x}" ]] && [[ -z "${CI+x}" ]] && [[ -z "${GITHUB_ACTIONS+x}" ]]; then
+  echo -e "\033[34;1mINFO: Running in local mode"
+  docker run \
+    -u "$(id -u):$(id -g)" \
+    --volume "$repo:/usr/src/elasticsearch-js" \
+    --volume /usr/src/elasticsearch-js/node_modules \
+    --volume "$(realpath $repo/../elastic-client-generator-js):/usr/src/elastic-client-generator-js" \
+    --env "WORKFLOW=$WORKFLOW" \
+    --name make-elasticsearch-js \
+    --rm \
+    $product \
+    /bin/bash -c "mkdir -p /usr/src/elastic-client-generator-js/output && \
+      node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
+else
+  echo -e "\033[34;1mINFO: Running in CI mode"
+  docker run \
+    --volume "$repo:/usr/src/elasticsearch-js" \
+    --volume /usr/src/elasticsearch-js/node_modules \
+    -u "$(id -u):$(id -g)" \
+    --env "WORKFLOW=$WORKFLOW" \
+    --name make-elasticsearch-js \
+    --rm \
+    $product \
+    /bin/bash -c "cd /usr/src && \
+      git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
+      mkdir -p /usr/src/elastic-client-generator-js/output && \
+      cd /usr/src/elasticsearch-js && \
+      node .ci/make.mjs --task $TASK ${TASK_ARGS[*]}"
+fi

 # ------------------------------------------------------- #
 # Post Command tasks & checks
@@ -1,11 +1,22 @@
 [[changelog-client]]
 == Release notes

+[discrete]
+=== 8.10.1
+
+[discrete]
+==== Fixes
+
+[discrete]
+===== Bump @elastic/transport to `~8.3.4`
+
+Switching from `^8.3.4` to `~8.3.4` ensures 8.10 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
+
 [discrete]
 === 8.10.0

 [discrete]
-=== Features
+==== Features

 [discrete]
 ===== Support for Elasticsearch `v8.10.0`
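As an aside, a minimal sketch of the caret vs. tilde range semantics these changelog entries rely on, using the `semver` package already present in this changeset:

import semver from 'semver'

// '~8.3.4' allows patch releases only, so transport 8.5.0 (which requires
// Node.js v18+) is excluded; '^8.3.4' would allow any 8.x release >= 8.3.4.
semver.satisfies('8.3.5', '~8.3.4') // true
semver.satisfies('8.5.0', '~8.3.4') // false
semver.satisfies('8.5.0', '^8.3.4') // true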
@@ -13,6 +24,17 @@
 You can find all the API changes
 https://www.elastic.co/guide/en/elasticsearch/reference/8.10/release-notes-8.10.0.html[here].

+[discrete]
+=== 8.9.2
+
+[discrete]
+==== Fixes
+
+[discrete]
+===== Bump @elastic/transport to `~8.3.4`
+
+Switching from `^8.3.4` to `~8.3.4` ensures 8.9 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
+
 [discrete]
 === 8.9.1

@@ -49,6 +71,17 @@ In the https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/curre

 The `user-agent` header the client used to connect to Elasticsearch was using a non-standard format that has been improved.

+[discrete]
+=== 8.8.2
+
+[discrete]
+==== Fixes
+
+[discrete]
+===== Bump @elastic/transport to `~8.3.2`
+
+Switching from `^8.3.2` to `~8.3.2` ensures 8.8 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
+
 [discrete]
 === 8.8.1

@@ -94,6 +127,17 @@ https://www.elastic.co/guide/en/elasticsearch/reference/8.8/release-notes-8.8.0.

 Prior releases contained a bug where type declarations for legacy types that include a `body` key were not actually importing the type that includes the `body` key.

+[discrete]
+=== 8.7.3
+
+[discrete]
+==== Fixes
+
+[discrete]
+===== Bump @elastic/transport to `~8.3.1`
+
+Switching from `^8.3.1` to `~8.3.1` ensures 8.7 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
+
 [discrete]
 === 8.7.0

@@ -103,6 +147,17 @@ Prior releases contained a bug where type declarations for legacy types that inc
 You can find all the API changes
 https://www.elastic.co/guide/en/elasticsearch/reference/8.7/release-notes-8.7.0.html[here].

+[discrete]
+=== 8.6.1
+
+[discrete]
+==== Fixes
+
+[discrete]
+===== Bump @elastic/transport to `~8.3.1`
+
+Switching from `^8.3.1` to `~8.3.1` ensures 8.6 client users are not required to update to Node.js v18+, which is a new requirement set by `@elastic/transport` v8.5.0. See https://github.com/elastic/elastic-transport-js/issues/91[elastic/elastic-transport-js#91] for details.
+
 [discrete]
 === 8.6.0

@@ -11,6 +11,8 @@ This page contains the information you need to connect and use the Client with
 * <<client-faas-env, Using the Client in a Function-as-a-Service Environment>>
 * <<client-connect-proxy, Connecting through a proxy>>
 * <<client-error-handling, Handling errors>>
+* <<keep-alive, Keep-alive connections>>
+* <<close-connections, Closing a client's connections>>
 * <<product-check, Automatic product check>>

 [[authentication]]
@@ -539,11 +541,17 @@ If you need to pass through an http(s) proxy for connecting to {es}, the client
 out of the box offers a handy configuration for helping you with it. Under the
 hood, it uses the https://github.com/delvedor/hpagent[`hpagent`] module.

+IMPORTANT: In versions 8.0+ of the client, the default `Connection` type is set to `UndiciConnection`, which does not support proxy configurations.
+To use a proxy, you will need to use the `HttpConnection` class from `@elastic/transport` instead.
+
 [source,js]
 ----
+import { HttpConnection } from '@elastic/transport'
+
 const client = new Client({
   node: 'http://localhost:9200',
-  proxy: 'http://localhost:8080'
+  proxy: 'http://localhost:8080',
+  Connection: HttpConnection,
 })
 ----

@@ -553,11 +561,12 @@ Basic authentication is supported as well:
 ----
 const client = new Client({
   node: 'http://localhost:9200',
-  proxy: 'http:user:pwd@//localhost:8080'
+  proxy: 'http:user:pwd@//localhost:8080',
+  Connection: HttpConnection,
 })
 ----

-If you are connecting through a not http(s) proxy, such as a `socks5` or `pac`,
+If you are connecting through a non-http(s) proxy, such as a `socks5` or `pac`,
 you can use the `agent` option to configure it.

 [source,js]
@@ -567,7 +576,8 @@ const client = new Client({
   node: 'http://localhost:9200',
   agent () {
     return new SocksProxyAgent('socks://127.0.0.1:1080')
-  }
+  },
+  Connection: HttpConnection,
 })
 ----

@@ -651,6 +661,51 @@ a|* `name` - `string`
 * `headers` - `object`, the response status code
 |===

+[[keep-alive]]
+[discrete]
+=== Keep-alive connections
+
+By default, the client uses persistent, keep-alive connections to reduce the overhead of creating a new HTTP connection for each Elasticsearch request.
+If you are using the default `UndiciConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 10 minutes.
+If you are using the legacy `HttpConnection` connection class, it maintains a pool of 256 connections with a keep-alive of 1 minute.
+
+If you need to disable keep-alive connections, you can override the HTTP agent with your preferred https://nodejs.org/api/http.html#http_new_agent_options[HTTP agent options]:
+
+[source,js]
+----
+const client = new Client({
+  node: 'http://localhost:9200',
+  // the function takes as parameter the option
+  // object passed to the Connection constructor
+  agent: (opts) => new CustomAgent()
+})
+----
+
+Or you can disable the HTTP agent entirely:
+
+[source,js]
+----
+const client = new Client({
+  node: 'http://localhost:9200',
+  // Disable agent and keep-alive
+  agent: false
+})
+----
+
+[discrete]
+[[close-connections]]
+=== Closing a client's connections
+
+If you would like to close all open connections being managed by an instance of the client, use the `close()` function:
+
+[source,js]
+----
+const client = new Client({
+  node: 'http://localhost:9200'
+});
+client.close();
+----
+
 [discrete]
 [[product-check]]
 === Automatic product check
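As an aside, a minimal sketch of the "override the HTTP agent" approach in the keep-alive section above, using Node's built-in agent and assuming the legacy `HttpConnection` class so the custom agent is honored:

import { Client } from '@elastic/elasticsearch'
import { HttpConnection } from '@elastic/transport'
import * as http from 'http'

const client = new Client({
  node: 'http://localhost:9200',
  Connection: HttpConnection,
  // hand back a standard Node.js agent with keep-alive turned off
  agent: () => new http.Agent({ keepAlive: false })
})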
@@ -826,7 +826,7 @@ If `false`, the response does not include the total number of hits matching the
 ** *`indices_boost` (Optional, Record<string, number>[])*: Boosts the _score of documents from specified indices.
 ** *`docvalue_fields` (Optional, { field, format, include_unmapped }[])*: Array of wildcard (`*`) patterns.
 The request returns doc values for field names matching these patterns in the `hits.fields` property of the response.
-** *`knn` (Optional, { field, query_vector, query_vector_builder, k, num_candidates, boost, filter } | { field, query_vector, query_vector_builder, k, num_candidates, boost, filter }[])*: Defines the approximate kNN search to run.
+** *`knn` (Optional, { field, query_vector, query_vector_builder, k, num_candidates, boost, filter, similarity } | { field, query_vector, query_vector_builder, k, num_candidates, boost, filter, similarity }[])*: Defines the approximate kNN search to run.
 ** *`rank` (Optional, { rrf })*: Defines the Reciprocal Rank Fusion (RRF) to use.
 ** *`min_score` (Optional, number)*: Minimum `_score` for matching documents.
 Documents with a lower `_score` are not included in the search results.
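As an aside, a minimal sketch of an approximate kNN search using the newly documented `similarity` option, assuming an existing `client` instance and hypothetical index and field names:

const result = await client.search({
  index: 'my-index',
  knn: {
    field: 'embedding',
    query_vector: [0.3, 0.1, 1.2],
    k: 10,
    num_candidates: 100,
    // minimum similarity a vector must have to the query vector to match
    similarity: 0.9
  }
})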
@@ -1305,7 +1305,7 @@ Defaults to 10,000 hits.
 ** *`indices_boost` (Optional, Record<string, number>[])*: Boosts the _score of documents from specified indices.
 ** *`docvalue_fields` (Optional, { field, format, include_unmapped }[])*: Array of wildcard (*) patterns. The request returns doc values for field
 names matching these patterns in the hits.fields property of the response.
-** *`knn` (Optional, { field, query_vector, query_vector_builder, k, num_candidates, boost, filter } | { field, query_vector, query_vector_builder, k, num_candidates, boost, filter }[])*: Defines the approximate kNN search to run.
+** *`knn` (Optional, { field, query_vector, query_vector_builder, k, num_candidates, boost, filter, similarity } | { field, query_vector, query_vector_builder, k, num_candidates, boost, filter, similarity }[])*: Defines the approximate kNN search to run.
 ** *`min_score` (Optional, number)*: Minimum _score for matching documents. Documents with a lower _score are
 not included in the search results.
 ** *`post_filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, field_masking_span, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, text_expansion, wildcard, wrapper, type })*
@@ -6234,6 +6234,7 @@ client.ml.putTrainedModelVocabulary({ model_id, vocabulary })
 ** *`model_id` (string)*: The unique identifier of the trained model.
 ** *`vocabulary` (string[])*: The model vocabulary, which must not be empty.
 ** *`merges` (Optional, string[])*: The optional model merges if required by the tokenizer.
+** *`scores` (Optional, number[])*: The optional vocabulary value scores if required by the tokenizer.

 [discrete]
 ==== reset_job
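As an aside, a minimal sketch of the call with the new `scores` field, assuming an existing `client` instance and hypothetical model id and values:

await client.ml.putTrainedModelVocabulary({
  model_id: 'my-tokenizer-model',
  vocabulary: ['[PAD]', '[UNK]', 'elastic', 'search'],
  merges: [],
  // optional per-token scores, used by tokenizers that require them
  scores: [0.0, 0.0, -1.2, -2.3]
})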
@@ -6354,6 +6355,7 @@ client.ml.startTrainedModelDeployment({ model_id })
 ** *`cache_size` (Optional, number | string)*: The inference cache size (in memory outside the JVM heap) per node for the model.
 The default value is the same size as the `model_size_bytes`. To disable the cache,
 `0b` can be provided.
+** *`deployment_id` (Optional, string)*: A unique identifier for the deployment of the model.
 ** *`number_of_allocations` (Optional, number)*: The number of model allocations on each node where the model is deployed.
 All allocations on a node share the same copy of the model in memory but use
 a separate set of threads to evaluate the model.
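As an aside, a minimal sketch of starting a deployment with the new `deployment_id` parameter, assuming an existing `client` instance and hypothetical identifiers:

await client.ml.startTrainedModelDeployment({
  model_id: 'my-model',
  // name the deployment so it can be addressed independently of the model id
  deployment_id: 'my-model-for-search',
  number_of_allocations: 1
})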
@@ -8917,6 +8919,8 @@ client.transform.deleteTransform({ transform_id })
 ** *`transform_id` (string)*: Identifier for the transform.
 ** *`force` (Optional, boolean)*: If this value is false, the transform must be stopped before it can be deleted. If true, the transform is
 deleted regardless of its current state.
+** *`delete_dest_index` (Optional, boolean)*: If this value is true, the destination index is deleted together with the transform. If false, the destination
+index will not be deleted
 ** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error.

 [discrete]
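As an aside, a minimal sketch of deleting a transform together with its destination index via the new flag, assuming an existing `client` instance and a hypothetical transform id:

await client.transform.deleteTransform({
  transform_id: 'my-transform',
  force: true,
  // also remove the index the transform wrote to
  delete_dest_index: true
})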
@@ -1,7 +1,7 @@
 {
   "name": "@elastic/elasticsearch",
-  "version": "8.10.0",
-  "versionCanary": "8.10.0-canary.1",
+  "version": "8.10.1",
+  "versionCanary": "8.10.1-canary.0",
   "description": "The official Elasticsearch client for Node.js",
   "main": "index.js",
   "types": "index.d.ts",
@@ -82,7 +82,7 @@
     "zx": "^7.2.2"
   },
   "dependencies": {
-    "@elastic/transport": "^8.3.4",
+    "@elastic/transport": "~8.3.4",
     "tslib": "^2.4.0"
   },
   "tap": {
@@ -71,7 +71,7 @@ export default class Eql {

   /**
    * Returns async results from previously executed Event Query Language (EQL) search
-   * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/8.10/get-async-eql-search-api.html | Elasticsearch API documentation}
+   * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/8.10/get-async-eql-search-api.html | Elasticsearch API documentation}
   */
  async get<TEvent = unknown> (this: That, params: T.EqlGetRequest | TB.EqlGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.EqlGetResponse<TEvent>>
  async get<TEvent = unknown> (this: That, params: T.EqlGetRequest | TB.EqlGetRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.EqlGetResponse<TEvent>, unknown>>
@@ -97,7 +97,7 @@ export default class Eql {

   /**
    * Returns the status of a previously submitted async or stored Event Query Language (EQL) search
-   * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/8.10/get-async-eql-status-api.html | Elasticsearch API documentation}
+   * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/8.10/get-async-eql-status-api.html | Elasticsearch API documentation}
   */
  async getStatus (this: That, params: T.EqlGetStatusRequest | TB.EqlGetStatusRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.EqlGetStatusResponse>
  async getStatus (this: That, params: T.EqlGetStatusRequest | TB.EqlGetStatusRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.EqlGetStatusResponse, unknown>>
@@ -1923,7 +1923,7 @@ export default class Ml {
  async putTrainedModelVocabulary (this: That, params: T.MlPutTrainedModelVocabularyRequest | TB.MlPutTrainedModelVocabularyRequest, options?: TransportRequestOptions): Promise<T.MlPutTrainedModelVocabularyResponse>
  async putTrainedModelVocabulary (this: That, params: T.MlPutTrainedModelVocabularyRequest | TB.MlPutTrainedModelVocabularyRequest, options?: TransportRequestOptions): Promise<any> {
    const acceptedPath: string[] = ['model_id']
-    const acceptedBody: string[] = ['vocabulary', 'merges']
+    const acceptedBody: string[] = ['vocabulary', 'merges', 'scores']
    const querystring: Record<string, any> = {}
    // @ts-expect-error
    const userBody: any = params?.body
@@ -707,7 +707,7 @@ export interface MsearchMultiSearchItem<TDocument = unknown> extends SearchRespo
   status?: integer
 }

-export interface MsearchMultiSearchResult<TDocument = unknown> {
+export interface MsearchMultiSearchResult<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> {
   took: long
   responses: MsearchResponseItem<TDocument>[]
 }
@@ -780,7 +780,7 @@ export interface MsearchRequest extends RequestBase {

 export type MsearchRequestItem = MsearchMultisearchHeader | MsearchMultisearchBody

-export type MsearchResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument>
+export type MsearchResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument, TAggregations>

 export type MsearchResponseItem<TDocument = unknown> = MsearchMultiSearchItem<TDocument> | ErrorResponseBase

@@ -796,7 +796,7 @@ export interface MsearchTemplateRequest extends RequestBase {

 export type MsearchTemplateRequestItem = MsearchMultisearchHeader | MsearchTemplateTemplateConfig

-export type MsearchTemplateResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument>
+export type MsearchTemplateResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument, TAggregations>

 export interface MsearchTemplateTemplateConfig {
   explain?: boolean
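As an aside, a minimal sketch of what the extra type parameter enables, with hypothetical document and aggregation shapes (the import path is the compiled location of these generated types):

import type { MsearchResponse } from '@elastic/elasticsearch/lib/api/types'

interface BlogPost { title: string }
interface BlogAggs { tags: { buckets: Array<{ key: string, doc_count: number }> } }

// TAggregations now flows through to MsearchMultiSearchResult instead of being dropped.
type TypedMsearchResponse = MsearchResponse<BlogPost, BlogAggs>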
@@ -2272,6 +2272,7 @@ export interface KnnQuery {
   num_candidates: long
   boost?: float
   filter?: QueryDslQueryContainer | QueryDslQueryContainer[]
+  similarity?: float
 }

 export interface LatLonGeoLocation {
@@ -14031,6 +14032,7 @@ export interface MlPutTrainedModelVocabularyRequest extends RequestBase {
   model_id: Id
   vocabulary: string[]
   merges?: string[]
+  scores?: double[]
 }

 export type MlPutTrainedModelVocabularyResponse = AcknowledgedResponseBase
@@ -14085,6 +14087,7 @@ export interface MlStartDatafeedResponse {
 export interface MlStartTrainedModelDeploymentRequest extends RequestBase {
   model_id: Id
   cache_size?: ByteSize
+  deployment_id?: string
   number_of_allocations?: integer
   priority?: MlTrainingPriority
   queue_capacity?: integer
@@ -17361,6 +17364,7 @@ export interface TransformTimeSync {
 export interface TransformDeleteTransformRequest extends RequestBase {
   transform_id: Id
   force?: boolean
+  delete_dest_index?: boolean
   timeout?: Duration
 }

@@ -735,7 +735,7 @@ export interface MsearchMultiSearchItem<TDocument = unknown> extends SearchRespo
   status?: integer
 }

-export interface MsearchMultiSearchResult<TDocument = unknown> {
+export interface MsearchMultiSearchResult<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> {
   took: long
   responses: MsearchResponseItem<TDocument>[]
 }
@@ -809,7 +809,7 @@ export interface MsearchRequest extends RequestBase {

 export type MsearchRequestItem = MsearchMultisearchHeader | MsearchMultisearchBody

-export type MsearchResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument>
+export type MsearchResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument, TAggregations>

 export type MsearchResponseItem<TDocument = unknown> = MsearchMultiSearchItem<TDocument> | ErrorResponseBase

@@ -826,7 +826,7 @@ export interface MsearchTemplateRequest extends RequestBase {

 export type MsearchTemplateRequestItem = MsearchMultisearchHeader | MsearchTemplateTemplateConfig

-export type MsearchTemplateResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument>
+export type MsearchTemplateResponse<TDocument = unknown, TAggregations = Record<AggregateName, AggregationsAggregate>> = MsearchMultiSearchResult<TDocument, TAggregations>

 export interface MsearchTemplateTemplateConfig {
   explain?: boolean
@@ -2345,6 +2345,7 @@ export interface KnnQuery {
   num_candidates: long
   boost?: float
   filter?: QueryDslQueryContainer | QueryDslQueryContainer[]
+  similarity?: float
 }

 export interface LatLonGeoLocation {
@@ -14293,6 +14294,7 @@ export interface MlPutTrainedModelVocabularyRequest extends RequestBase {
   body?: {
     vocabulary: string[]
     merges?: string[]
+    scores?: double[]
   }
 }

@@ -14354,6 +14356,7 @@ export interface MlStartDatafeedResponse {
 export interface MlStartTrainedModelDeploymentRequest extends RequestBase {
   model_id: Id
   cache_size?: ByteSize
+  deployment_id?: string
   number_of_allocations?: integer
   priority?: MlTrainingPriority
   queue_capacity?: integer
@@ -17771,6 +17774,7 @@ export interface TransformTimeSync {
 export interface TransformDeleteTransformRequest extends RequestBase {
   transform_id: Id
   force?: boolean
+  delete_dest_index?: boolean
   timeout?: Duration
 }

@@ -527,6 +527,8 @@ export default class Helpers {
    * @return {object} The possible operations to run with the datasource.
    */
   bulk<TDocument = unknown> (options: BulkHelperOptions<TDocument>, reqOptions: TransportRequestOptions = {}): BulkHelper<TDocument> {
+    assert(!(reqOptions.asStream ?? false), 'bulk helper: the asStream request option is not supported')
+
     const client = this[kClient]
     const { serializer } = client
     if (this[kMetaHeader] !== null) {
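As an aside, a minimal sketch of how this new assertion surfaces to callers, assuming an existing `client` instance and a hypothetical index name and dataset:

const docs = [{ title: 'hello' }, { title: 'world' }]

await client.helpers.bulk(
  {
    datasource: docs,
    onDocument () { return { index: { _index: 'my-index' } } }
  },
  // per-request transport options are still accepted here...
  { headers: { foo: 'bar' } }
  // ...but including `asStream: true` now throws an AssertionError:
  // 'bulk helper: the asStream request option is not supported'
)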
@@ -18,6 +18,7 @@
 */

 import FakeTimers from '@sinonjs/fake-timers'
+import { AssertionError } from 'assert'
 import { createReadStream } from 'fs'
 import * as http from 'http'
 import { join } from 'path'
@@ -1336,6 +1337,37 @@ test('transport options', t => {
     })
   })

+  t.test('Should not allow asStream request option', async t => {
+    t.plan(2)
+
+    const client = new Client({
+      node: 'http://localhost:9200',
+    })
+
+    try {
+      await client.helpers.bulk({
+        datasource: dataset.slice(),
+        flushBytes: 1,
+        concurrency: 1,
+        onDocument (doc) {
+          return { index: { _index: 'test' } }
+        },
+        onDrop (doc) {
+          t.fail('This should never be called')
+        },
+        refreshOnCompletion: true
+      }, {
+        headers: {
+          foo: 'bar'
+        },
+        asStream: true,
+      })
+    } catch (err: any) {
+      t.ok(err instanceof AssertionError)
+      t.equal(err.message, 'bulk helper: the asStream request option is not supported')
+    }
+  })
+
   t.end()
 })