Compare commits


10 Commits

20 changed files with 741 additions and 1788 deletions


@@ -1,17 +1,20 @@
---
agents:
provider: "gcp"
image: family/core-ubuntu-2204
memory: "8G"
cpu: "2"
steps:
- label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }}) Test Suite: {{ matrix.suite }}"
agents:
provider: "gcp"
- label: ":elasticsearch: :javascript: ES JavaScript ({{ matrix.nodejs }})"
env:
NODE_VERSION: "{{ matrix.nodejs }}"
TEST_SUITE: "{{ matrix.suite }}"
STACK_VERSION: 8.16.0
TEST_SUITE: "platinum"
STACK_VERSION: 9.0.0
GITHUB_TOKEN_PATH: "secret/ci/elastic-elasticsearch-js/github-token"
TEST_ES_STACK: "1"
matrix:
setup:
suite:
- "free"
- "platinum"
nodejs:
- "18"
- "20"
@@ -21,9 +24,6 @@ steps:
- wait: ~
continue_on_failure: true
- label: ":junit: Test results"
agents:
provider: "gcp"
image: family/core-ubuntu-2204
plugins:
- junit-annotate#v2.4.1:
artifacts: "junit-output/junit-*.xml"


@@ -10,22 +10,29 @@ export NODE_VERSION=${NODE_VERSION:-18}
echo "--- :javascript: Building Docker image"
docker build \
--file "$script_path/Dockerfile" \
--tag elastic/elasticsearch-js \
--build-arg NODE_VERSION="$NODE_VERSION" \
.
--file "$script_path/Dockerfile" \
--tag elastic/elasticsearch-js \
--build-arg NODE_VERSION="$NODE_VERSION" \
.
echo "--- :javascript: Running $TEST_SUITE tests"
GITHUB_TOKEN=$(vault read -field=token "$GITHUB_TOKEN_PATH")
export GITHUB_TOKEN
echo "--- :javascript: Running tests"
mkdir -p "$repo/junit-output"
docker run \
--network="${network_name}" \
--env "TEST_ES_SERVER=${elasticsearch_url}" \
--env "ELASTIC_PASSWORD=${elastic_password}" \
--env "TEST_SUITE=${TEST_SUITE}" \
--env "ELASTIC_USER=elastic" \
--env "BUILDKITE=true" \
--volume "$repo/junit-output:/junit-output" \
--name elasticsearch-js \
--rm \
elastic/elasticsearch-js \
bash -c "npm run test:integration; [ -f ./$TEST_SUITE-report-junit.xml ] && mv ./$TEST_SUITE-report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"
--network="${network_name}" \
--env TEST_ES_STACK \
--env STACK_VERSION \
--env GITHUB_TOKEN \
--env "TEST_ES_SERVER=${elasticsearch_url}" \
--env "ELASTIC_PASSWORD=${elastic_password}" \
--env "ELASTIC_USER=elastic" \
--env "BUILDKITE=true" \
--volume "/usr/src/app/node_modules" \
--volume "$repo:/usr/src/app" \
--volume "$repo/junit-output:/junit-output" \
--name elasticsearch-js \
--rm \
elastic/elasticsearch-js \
bash -c "npm run test:integration; [ -f ./report-junit.xml ] && mv ./report-junit.xml /junit-output/junit-$BUILDKITE_JOB_ID.xml || echo 'No JUnit artifact found'"


@@ -6,3 +6,6 @@ elasticsearch
lib
junit-output
.tap
rest-api-spec
yaml-rest-tests
generated-tests

.gitignore

@@ -68,3 +68,7 @@ bun.lockb
test-results
processinfo
.tap
rest-api-spec
yaml-rest-tests
generated-tests
schema


@@ -74,3 +74,6 @@ CONTRIBUTING.md
src
bun.lockb
.tap
rest-api-spec
yaml-rest-tests
generated-tests


@@ -171,7 +171,18 @@ a|`function` - Takes a `Connection` and returns `true` if it can be sent a reque
_Default:_
[source,js]
----
() => true
function defaultNodeFilter (conn) {
if (conn.roles != null) {
if (
// avoid master-only nodes
conn.roles.master &&
!conn.roles.data &&
!conn.roles.ingest &&
!conn.roles.ml
) return false
}
return true
}
----
|`nodeSelector`


@@ -1,6 +1,29 @@
[[changelog-client]]
== Release notes
[discrete]
=== 8.18.2
[discrete]
==== Fixes
[discrete]
===== Ensure Apache Arrow ES|QL helper uses async iterator
The `esql.toArrowReader()` helper was typed as returning `RecordBatchStreamReader`, a synchronous iterator, even though the `apache-arrow` package was, in most cases, automatically coercing it to `AsyncRecordBatchStreamReader`, its asynchronous counterpart. It is now always returned as an async iterator.
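With this fix, the reader must be consumed asynchronously. A minimal sketch, assuming an existing `client` instance and a `sample_data` index (names illustrative):

[source,ts]
----
const reader = await client.helpers
  .esql({ query: 'FROM sample_data' })
  .toArrowReader()

// AsyncRecordBatchStreamReader is an async iterator, so use `for await`
for await (const recordBatch of reader) {
  for (const record of recordBatch) {
    console.log(record.toJSON())
  }
}
----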
[discrete]
=== 8.18.1
[discrete]
==== Fixes
[discrete]
===== Fix broken node roles and node filter
The docs note a `nodeFilter` option on the client that will, by default, filter the nodes based on any `roles` values that are set at instantiation. At some point, this functionality was partially disabled. This change brings the feature back, ensuring that it matches what the documentation has said it does all along.
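A minimal sketch of the restored behavior, assuming two reachable nodes (addresses illustrative); the connection pool should now skip the master-only `node1` and route requests to `node2`:

[source,ts]
----
import { Client } from '@elastic/elasticsearch'

const client = new Client({
  nodes: [
    {
      url: new URL('http://node1:9200'),
      roles: { master: true, data: false, ingest: false, ml: false }
    },
    {
      url: new URL('http://node2:9200'),
      roles: { master: true, data: true, ingest: false, ml: false }
    }
  ]
})
----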
[discrete]
=== 8.18.0
[discrete]


@@ -715,7 +715,7 @@ const result = await client.helpers
ES|QL can return results in multiple binary formats, including https://arrow.apache.org/[Apache Arrow]'s streaming format. Because it is a very efficient format to read, it can be valuable for performing high-performance in-memory analytics. And, because the response is streamed as batches of records, it can be used to produce aggregations and other calculations on larger-than-memory data sets.
`toArrowReader` returns a https://arrow.apache.org/docs/js/classes/Arrow_dom.RecordBatchReader.html[`RecordBatchStreamReader`].
`toArrowReader` returns a https://github.com/apache/arrow/blob/520ae44272d491bbb52eb3c9b84864ed7088f11a/js/src/ipc/reader.ts#L216[`AsyncRecordBatchStreamReader`].
[source,ts]
----
@@ -724,7 +724,7 @@ const reader = await client.helpers
.toArrowReader()
// print each record as JSON
for (const recordBatch of reader) {
for await (const recordBatch of reader) {
for (const record of recordBatch) {
console.log(record.toJSON())
}
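Because batches arrive incrementally, results larger than available memory can be reduced on the fly. A minimal sketch, assuming a numeric `amount` field in `sample_data` (names illustrative):

[source,ts]
----
const reader = await client.helpers
  .esql({ query: 'FROM sample_data' })
  .toArrowReader()

// keep a running total instead of materializing the whole result set
let total = 0
for await (const recordBatch of reader) {
  for (const record of recordBatch) {
    total += record.toJSON().amount as number
  }
}
console.log(`total amount: ${total}`)
----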


@@ -97,7 +97,7 @@ client.diagnostic.on('request', (err, result) => {
----
|`deserialization`
a|Emitted before starting deserialization and decompression. If you want to measure this phase duration, you should measure the time elapsed between this event and `response`. _(This event might not be emitted in certain situations)_.
a|Emitted before starting deserialization and decompression. If you want to measure this phase's duration, measure the time elapsed between this event and `response`. This event might not be emitted in certain situations: when `asStream` is set to `true`, when a response is terminated early because its content length is too large, or when a response is terminated early by an `AbortController`.
[source,js]
----
client.diagnostic.on('deserialization', (err, result) => {
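To measure the phase duration described above, a minimal sketch that correlates the two events by request id (assuming `result.meta.request.id` is available on both events, as in `@elastic/transport` diagnostic results):

[source,ts]
----
const deserializationStart = new Map<string | number, number>()

client.diagnostic.on('deserialization', (err, result) => {
  if (result != null) deserializationStart.set(result.meta.request.id, Date.now())
})

client.diagnostic.on('response', (err, result) => {
  if (result == null) return
  const start = deserializationStart.get(result.meta.request.id)
  if (start != null) {
    console.log(`deserialization took ${Date.now() - start}ms`)
    deserializationStart.delete(result.meta.request.id)
  }
})
----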


@@ -1,7 +1,7 @@
{
"name": "@elastic/elasticsearch",
"version": "8.18.0",
"versionCanary": "8.18.0-canary.0",
"version": "8.18.2",
"versionCanary": "8.18.2-canary.0",
"description": "The official Elasticsearch client for Node.js",
"main": "./index.js",
"types": "index.d.ts",
@@ -18,7 +18,8 @@
"test:coverage-100": "npm run build && tap --coverage --100",
"test:coverage-report": "npm run build && tap --coverage && nyc report --reporter=text-lcov > coverage.lcov",
"test:coverage-ui": "npm run build && tap --coverage --coverage-report=html",
"test:integration": "tsc && node test/integration/index.js",
"test:integration-build": "npm run build && node test/integration/index.js",
"test:integration": "npm run test:integration-build && env tap run --jobs=1 --reporter=junit --reporter-file=report-junit.xml generated-tests/",
"lint": "ts-standard src",
"lint:fix": "ts-standard --fix src",
"license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause;0BSD'",
@@ -76,8 +77,8 @@
"node-fetch": "2.7.0",
"ora": "5.4.1",
"proxy": "1.0.2",
"rimraf": "3.0.2",
"semver": "7.6.3",
"rimraf": "5.0.10",
"semver": "7.7.1",
"split2": "4.2.0",
"stoppable": "1.1.0",
"tap": "21.0.1",
@@ -89,8 +90,8 @@
"zx": "7.2.3"
},
"dependencies": {
"@elastic/transport": "^8.9.1",
"apache-arrow": "^18.0.0",
"@elastic/transport": "^8.9.6",
"apache-arrow": "18.x - 19.x",
"tslib": "^2.4.0"
},
"tap": {


@@ -17,162 +17,102 @@
* under the License.
*/
'use strict'
const { join } = require('path')
const minimist = require('minimist')
const stream = require('stream')
const { promisify } = require('util')
const { createWriteStream, promises } = require('fs')
const rimraf = require('rimraf')
const { rimraf } = require('rimraf')
const fetch = require('node-fetch')
const crossZip = require('cross-zip')
const ora = require('ora')
const { mkdir, writeFile } = promises
const { mkdir, cp } = promises
const pipeline = promisify(stream.pipeline)
const unzip = promisify(crossZip.unzip)
const rm = promisify(rimraf)
const esFolder = join(__dirname, '..', 'elasticsearch')
const zipFolder = join(esFolder, 'artifacts.zip')
const specFolder = join(esFolder, 'rest-api-spec', 'api')
const freeTestFolder = join(esFolder, 'rest-api-spec', 'test', 'free')
const xPackTestFolder = join(esFolder, 'rest-api-spec', 'test', 'platinum')
const artifactInfo = join(esFolder, 'info.json')
const testYamlFolder = join(__dirname, '..', 'yaml-rest-tests')
const zipFile = join(__dirname, '..', 'elasticsearch-clients-tests.zip')
async function downloadArtifacts (opts) {
if (typeof opts.version !== 'string') {
throw new Error('Missing version')
}
const schemaFolder = join(__dirname, '..', 'schema')
const schemaJson = join(schemaFolder, 'schema.json')
async function downloadArtifacts (localTests, version = 'main') {
const log = ora('Checking out spec and test').start()
log.text = 'Resolving versions'
let resolved
try {
resolved = await resolve(opts.version, opts.hash)
} catch (err) {
log.fail(err.message)
process.exit(1)
const { GITHUB_TOKEN } = process.env
if (version !== 'main') {
version = version.split('.').slice(0, 2).join('.')
}
opts.id = opts.id || resolved.id
opts.hash = opts.hash || resolved.hash
opts.version = resolved.version
log.text = 'Clean tests folder'
await rimraf(testYamlFolder)
await mkdir(testYamlFolder, { recursive: true })
const info = loadInfo()
log.text = `Fetch test YAML files for version ${version}`
if (info && info.version === opts.version) {
if (info.hash === opts.hash && info.id === opts.id) {
log.succeed('The artifact copy present locally is already up to date')
return
if (localTests) {
log.text = `Copying local tests from ${localTests}`
await cp(localTests, testYamlFolder, { recursive: true })
} else {
if (!GITHUB_TOKEN) {
log.fail("Missing required environment variable 'GITHUB_TOKEN'")
process.exit(1)
}
const response = await fetch(`https://api.github.com/repos/elastic/elasticsearch-clients-tests/zipball/${version}`, {
headers: {
Authorization: `Bearer ${GITHUB_TOKEN}`,
Accept: 'application/vnd.github+json'
}
})
if (!response.ok) {
log.fail(`unexpected response ${response.statusText}`)
process.exit(1)
}
log.text = 'Downloading tests zipball'
await pipeline(response.body, createWriteStream(zipFile))
log.text = 'Unzipping tests'
await unzip(zipFile, testYamlFolder)
log.text = 'Cleanup'
await rimraf(zipFile)
}
log.text = 'Cleanup checkouts/elasticsearch'
await rm(esFolder)
await mkdir(esFolder, { recursive: true })
log.text = 'Fetching Elasticsearch specification'
await rimraf(schemaFolder)
await mkdir(schemaFolder, { recursive: true })
log.text = 'Downloading artifacts'
const response = await fetch(resolved.url)
const response = await fetch(`https://raw.githubusercontent.com/elastic/elasticsearch-specification/${version}/output/schema/schema.json`)
if (!response.ok) {
log.fail(`unexpected response ${response.statusText}`)
process.exit(1)
}
await pipeline(response.body, createWriteStream(zipFolder))
log.text = 'Unzipping'
await unzip(zipFolder, esFolder)
log.text = 'Cleanup'
await rm(zipFolder)
log.text = 'Update info'
await writeFile(artifactInfo, JSON.stringify(opts), 'utf8')
log.text = 'Downloading schema.json'
await pipeline(response.body, createWriteStream(schemaJson))
log.succeed('Done')
}
function loadInfo () {
try {
return require(artifactInfo)
} catch (err) {
return null
}
async function main () {
await downloadArtifacts()
}
async function resolve (version, hash) {
const response = await fetch(`https://artifacts-api.elastic.co/v1/versions/${version}`)
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}
const data = await response.json()
const esBuilds = data.version.builds
.filter(build => build.projects.elasticsearch != null)
.map(build => {
return {
projects: build.projects.elasticsearch,
buildId: build.build_id,
date: build.start_time,
version: build.version
}
})
.sort((a, b) => {
const dA = new Date(a.date)
const dB = new Date(b.date)
if (dA > dB) return -1
if (dA < dB) return 1
return 0
})
if (hash != null) {
const build = esBuilds.find(build => build.projects.commit_hash === hash)
if (!build) {
throw new Error(`Can't find any build with hash '${hash}'`)
}
const zipKey = Object.keys(build.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip'))
return {
url: build.projects.packages[zipKey].url,
id: build.buildId,
hash: build.projects.commit_hash,
version: build.version
}
}
const lastBuild = esBuilds[0]
const zipKey = Object.keys(lastBuild.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip'))
return {
url: lastBuild.projects.packages[zipKey].url,
id: lastBuild.buildId,
hash: lastBuild.projects.commit_hash,
version: lastBuild.version
}
}
async function main (options) {
delete options._
await downloadArtifacts(options)
}
if (require.main === module) {
process.on('unhandledRejection', function (err) {
console.error(err)
process.exit(1)
})
const options = minimist(process.argv.slice(2), {
string: ['id', 'version', 'hash']
})
main(options).catch(t => {
main().catch(t => {
console.log(t)
process.exit(2)
})
}
module.exports = downloadArtifacts
module.exports.locations = {
specFolder,
freeTestFolder,
xPackTestFolder
}
module.exports.locations = { testYamlFolder, zipFile, schemaJson }
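// Usage sketch (illustrative, not part of this diff). With GITHUB_TOKEN set,
// fetch the clients-tests zipball and the matching schema.json for a stack
// version, or copy tests from a local checkout instead of GitHub:
//
//   downloadArtifacts(undefined, '8.18.2').catch(err => { console.error(err); process.exit(1) })
//   downloadArtifacts('/path/to/elasticsearch-clients-tests/tests', 'main').catch(console.error)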


@@ -20,7 +20,7 @@
const { join } = require('path')
const { writeFile } = require('fs/promises')
const fetch = require('node-fetch')
const rimraf = require('rimraf')
const { rimraf } = require('rimraf')
const ora = require('ora')
const { convertRequests } = require('@elastic/request-converter')
const minimist = require('minimist')


@@ -78,6 +78,13 @@ export interface NodeOptions {
ssl?: TlsConnectionOptions
/** @property headers Custom HTTP headers that should be sent with each request */
headers?: Record<string, any>
/** @property roles Common Elasticsearch roles that can be assigned to this node. Can be helpful when writing custom nodeFilter or nodeSelector functions. */
roles?: {
master: boolean
data: boolean
ingest: boolean
ml: boolean
}
}
export interface ClientOptions {
@@ -135,7 +142,7 @@ export interface ClientOptions {
* @defaultValue null */
agent?: HttpAgentOptions | UndiciAgentOptions | agentFn | false
/** @property nodeFilter A custom function used by the connection pool to determine which nodes are qualified to receive a request
* @defaultValue () => true */
* @defaultValue A function that uses the Connection `roles` property to avoid master-only nodes */
nodeFilter?: nodeFilterFn
/** @property nodeSelector A custom function used by the connection pool to determine which node should receive the next request
* @defaultValue A "round robin" function that loops sequentially through each node in the pool. */


@@ -25,7 +25,7 @@ import assert from 'node:assert'
import * as timersPromises from 'node:timers/promises'
import { Readable } from 'node:stream'
import { errors, TransportResult, TransportRequestOptions, TransportRequestOptionsWithMeta } from '@elastic/transport'
import { Table, TypeMap, tableFromIPC, RecordBatchStreamReader } from 'apache-arrow/Arrow.node'
import { Table, TypeMap, tableFromIPC, AsyncRecordBatchStreamReader } from 'apache-arrow/Arrow.node'
import Client from './client'
import * as T from './api/types'
import { Id } from './api/types'
@@ -158,7 +158,7 @@ export interface EsqlResponse {
export interface EsqlHelper {
toRecords: <TDocument>() => Promise<EsqlToRecords<TDocument>>
toArrowTable: () => Promise<Table<TypeMap>>
toArrowReader: () => Promise<RecordBatchStreamReader>
toArrowReader: () => Promise<AsyncRecordBatchStreamReader>
}
export interface EsqlToRecords<TDocument> {
@@ -1023,7 +1023,7 @@ export default class Helpers {
return tableFromIPC(response)
},
async toArrowReader (): Promise<RecordBatchStreamReader> {
async toArrowReader (): Promise<AsyncRecordBatchStreamReader> {
if (metaHeader !== null) {
reqOptions.headers = reqOptions.headers ?? {}
reqOptions.headers['x-elastic-client-meta'] = `${metaHeader as string},h=qa`
@@ -1032,8 +1032,9 @@
params.format = 'arrow'
const response = await client.esql.query(params, reqOptions)
return RecordBatchStreamReader.from(response)
// @ts-expect-error response is a Readable when asStream is true
const response: Readable = await client.esql.query(params, reqOptions)
return await AsyncRecordBatchStreamReader.from(Readable.from(response))
}
}


@@ -24,436 +24,63 @@ process.on('unhandledRejection', function (err) {
process.exit(1)
})
const { writeFileSync, readFileSync, readdirSync, statSync } = require('fs')
const { join, sep } = require('path')
const yaml = require('js-yaml')
const minimist = require('minimist')
const ms = require('ms')
const { Client } = require('../../index')
const build = require('./test-runner')
const { sleep } = require('./helper')
const createJunitReporter = require('./reporter')
const assert = require('node:assert')
const url = require('node:url')
const fs = require('node:fs')
const path = require('node:path')
const globby = require('globby')
const semver = require('semver')
const downloadArtifacts = require('../../scripts/download-artifacts')
const yamlFolder = downloadArtifacts.locations.freeTestFolder
const xPackYamlFolder = downloadArtifacts.locations.xPackTestFolder
const buildTests = require('./test-builder')
const MAX_API_TIME = 1000 * 90
const MAX_FILE_TIME = 1000 * 30
const MAX_TEST_TIME = 1000 * 3
const yamlFolder = downloadArtifacts.locations.testYamlFolder
const options = minimist(process.argv.slice(2), {
boolean: ['bail'],
string: ['suite', 'test'],
})
const freeSkips = {
// working on fixes for these
'/free/aggregations/bucket_selector.yml': ['bad script'],
'/free/aggregations/bucket_script.yml': ['bad script'],
// either the YAML test definition is wrong, or this fails because JSON.stringify is coercing "1.0" to "1"
'/free/aggregations/percentiles_bucket.yml': ['*'],
// not supported yet
'/free/cluster.desired_nodes/10_basic.yml': ['*'],
// Cannot find methods on `Internal` object
'/free/cluster.desired_balance/10_basic.yml': ['*'],
'/free/cluster.desired_nodes/20_dry_run.yml': ['*'],
'/free/cluster.prevalidate_node_removal/10_basic.yml': ['*'],
// the v8 client never sends the scroll_id in querystring,
// the way the test is structured causes a security exception
'free/scroll/10_basic.yml': ['Body params override query string'],
'free/scroll/11_clear.yml': [
'Body params with array param override query string',
'Body params with string param scroll id override query string'
],
'free/cat.allocation/10_basic.yml': ['*'],
'free/cat.snapshots/10_basic.yml': ['Test cat snapshots output'],
'indices.stats/50_disk_usage.yml': ['Disk usage stats'],
'indices.stats/60_field_usage.yml': ['Field usage stats'],
// skipping because we are booting ES with `discovery.type=single-node`
// and this test will fail because of this configuration
'nodes.stats/30_discovery.yml': ['*'],
// the expected error is returning a 503,
// which triggers a retry and the node to be marked as dead
'search.aggregation/240_max_buckets.yml': ['*'],
// long values and json do not play nicely together
'search.aggregation/40_range.yml': ['Min and max long range bounds'],
// the yaml runner assumes that null means "does not exist",
// while null is a valid json value, so the check will fail
'search/320_disallow_queries.yml': ['Test disallow expensive queries'],
'free/tsdb/90_unsupported_operations.yml': ['noop update'],
}
const platinumDenyList = {
'api_key/10_basic.yml': ['Test get api key'],
'api_key/20_query.yml': ['*'],
'api_key/11_invalidation.yml': ['Test invalidate api key by realm name'],
'analytics/histogram.yml': ['Histogram requires values in increasing order'],
// object keys must be strings, and `0.0.toString()` is `0`
'ml/evaluate_data_frame.yml': [
'Test binary_soft_classifition precision',
'Test binary_soft_classifition recall',
'Test binary_soft_classifition confusion_matrix'
],
// The cleanup fails with a index not found when retrieving the jobs
'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'],
'ml/bucket_correlation_agg.yml': ['Test correlation bucket agg simple'],
// start should be a string
'ml/jobs_get_result_overall_buckets.yml': ['Test overall buckets given epoch start and end params'],
// this can't happen with the client
'ml/start_data_frame_analytics.yml': ['Test start with inconsistent body/param ids'],
'ml/stop_data_frame_analytics.yml': ['Test stop with inconsistent body/param ids'],
'ml/preview_datafeed.yml': ['*'],
// Investigate why this is failing
'ml/inference_crud.yml': ['*'],
'ml/categorization_agg.yml': ['Test categorization aggregation with poor settings'],
'ml/filter_crud.yml': ['*'],
// investigate why this is failing
'monitoring/bulk/10_basic.yml': ['*'],
'monitoring/bulk/20_privileges.yml': ['*'],
'license/20_put_license.yml': ['*'],
'snapshot/10_basic.yml': ['*'],
'snapshot/20_operator_privileges_disabled.yml': ['*'],
// the body is correct, but the regex is failing
'sql/sql.yml': ['Getting textual representation'],
'searchable_snapshots/10_usage.yml': ['*'],
'service_accounts/10_basic.yml': ['*'],
// we are setting two certificates in the docker config
'ssl/10_basic.yml': ['*'],
'token/10_basic.yml': ['*'],
'token/11_invalidation.yml': ['*'],
// very likely, the index template has not been loaded yet.
// we should run an indices.existsTemplate, but the name of the
// template may vary over time.
'transforms_crud.yml': [
'Test basic transform crud',
'Test transform with query and array of indices in source',
'Test PUT continuous transform',
'Test PUT continuous transform without delay set'
],
'transforms_force_delete.yml': [
'Test force deleting a running transform'
],
'transforms_cat_apis.yml': ['*'],
'transforms_start_stop.yml': ['*'],
'transforms_stats.yml': ['*'],
'transforms_stats_continuous.yml': ['*'],
'transforms_update.yml': ['*'],
// js does not support ulongs
'unsigned_long/10_basic.yml': ['*'],
'unsigned_long/20_null_value.yml': ['*'],
'unsigned_long/30_multi_fields.yml': ['*'],
'unsigned_long/40_different_numeric.yml': ['*'],
'unsigned_long/50_script_values.yml': ['*'],
// the v8 client flattens the body into the parent object
'platinum/users/10_basic.yml': ['Test put user with different username in body'],
// docker issue?
'watcher/execute_watch/60_http_input.yml': ['*'],
// the checks are correct, but for some reason the test is failing on the js side
// I bet it's because of the backslashes in the regex
'watcher/execute_watch/70_invalid.yml': ['*'],
'watcher/put_watch/10_basic.yml': ['*'],
'xpack/15_basic.yml': ['*'],
// tests that are failing and need to be investigated
// the error cause can either be in the yaml test or in the specification
// start should be a string in the yaml test
'platinum/ml/delete_job_force.yml': ['Test force delete an open job that is referred by a started datafeed'],
'platinum/ml/evaluate_data_frame.yml': ['*'],
'platinum/ml/get_datafeed_stats.yml': ['*'],
// start should be a string in the yaml test
'platinum/ml/start_stop_datafeed.yml': ['*'],
}
function runner (opts = {}) {
const options = { node: opts.node }
if (opts.isXPack) {
options.tls = {
ca: readFileSync(join(__dirname, '..', '..', '.buildkite', 'certs', 'ca.crt'), 'utf8'),
rejectUnauthorized: false
const getAllFiles = async dir => {
const files = await globby(dir, {
expandDirectories: {
extensions: ['yml', 'yaml']
}
}
const client = new Client(options)
log('Loading yaml suite')
start({ client, isXPack: opts.isXPack })
.catch(err => {
if (err.name === 'ResponseError') {
console.error(err)
console.log(JSON.stringify(err.meta, null, 2))
} else {
console.error(err)
}
process.exit(1)
})
})
return files.sort()
}
async function waitCluster (client, times = 0) {
try {
await client.cluster.health({ wait_for_status: 'green', timeout: '50s' })
} catch (err) {
if (++times < 10) {
await sleep(5000)
return waitCluster(client, times)
}
console.error(err)
process.exit(1)
}
}
async function start ({ client, isXPack }) {
log('Waiting for Elasticsearch')
await waitCluster(client)
const body = await client.info()
const { number: version, build_hash: hash } = body.version
log(`Downloading artifacts for hash ${hash}...`)
await downloadArtifacts({ hash, version })
log(`Testing ${isXPack ? 'Platinum' : 'Free'} api...`)
const junit = createJunitReporter()
const junitTestSuites = junit.testsuites(`Integration test for ${isXPack ? 'Platinum' : 'Free'} api`)
const stats = {
total: 0,
skip: 0,
pass: 0,
assertions: 0
}
const folders = getAllFiles(isXPack ? xPackYamlFolder : yamlFolder)
.filter(t => !/(README|TODO)/g.test(t))
// we cluster the array based on the folder names,
// to provide a better test log output
.reduce((arr, file) => {
const path = file.slice(file.indexOf('/rest-api-spec/test'), file.lastIndexOf('/'))
let inserted = false
for (let i = 0; i < arr.length; i++) {
if (arr[i][0].includes(path)) {
inserted = true
arr[i].push(file)
break
}
}
if (!inserted) arr.push([file])
return arr
}, [])
const totalTime = now()
for (const folder of folders) {
// pretty name
const apiName = folder[0].slice(
folder[0].indexOf(`${sep}rest-api-spec${sep}test`) + 19,
folder[0].lastIndexOf(sep)
)
log('Testing ' + apiName.slice(1))
const apiTime = now()
for (const file of folder) {
const testRunner = build({
client,
version,
isXPack: file.includes('platinum')
})
const fileTime = now()
const data = readFileSync(file, 'utf8')
// get the test yaml (as object); some files have multiple yaml documents inside,
// every document is separated by '---', so we split on the separator
// and then we remove the empty strings, finally we parse them
const tests = data
.split('\n---\n')
.map(s => s.trim())
// empty strings
.filter(Boolean)
.map(parse)
// null values
.filter(Boolean)
// get setup and teardown if present
let setupTest = null
let teardownTest = null
for (const test of tests) {
if (test.setup) setupTest = test.setup
if (test.teardown) teardownTest = test.teardown
}
const cleanPath = file.slice(file.lastIndexOf(apiName))
// skip if --suite CLI arg doesn't match
if (options.suite && !cleanPath.endsWith(options.suite)) continue
log(' ' + cleanPath)
const junitTestSuite = junitTestSuites.testsuite(apiName.slice(1) + ' - ' + cleanPath)
for (const test of tests) {
const testTime = now()
const name = Object.keys(test)[0]
// skip setups, teardowns and anything that doesn't match --test flag when present
if (name === 'setup' || name === 'teardown') continue
if (options.test && !name.endsWith(options.test)) continue
const junitTestCase = junitTestSuite.testcase(name, `node_${process.version}: ${cleanPath}`)
stats.total += 1
if (shouldSkip(isXPack, file, name)) {
stats.skip += 1
junitTestCase.skip('This test is in the skip list of the client')
junitTestCase.end()
continue
}
log(' - ' + name)
try {
await testRunner.run(setupTest, test[name], teardownTest, stats, junitTestCase)
stats.pass += 1
} catch (err) {
junitTestCase.failure(err)
junitTestCase.end()
junitTestSuite.end()
junitTestSuites.end()
generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
err.meta = JSON.stringify(err.meta ?? {}, null, 2)
console.error(err)
if (options.bail) {
process.exit(1)
} else {
continue
}
}
const totalTestTime = now() - testTime
junitTestCase.end()
if (totalTestTime > MAX_TEST_TIME) {
log(' took too long: ' + ms(totalTestTime))
} else {
log(' took: ' + ms(totalTestTime))
}
}
junitTestSuite.end()
const totalFileTime = now() - fileTime
if (totalFileTime > MAX_FILE_TIME) {
log(` ${cleanPath} took too long: ` + ms(totalFileTime))
} else {
log(` ${cleanPath} took: ` + ms(totalFileTime))
}
}
const totalApiTime = now() - apiTime
if (totalApiTime > MAX_API_TIME) {
log(`${apiName} took too long: ` + ms(totalApiTime))
} else {
log(`${apiName} took: ` + ms(totalApiTime))
}
}
junitTestSuites.end()
generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
log(`Total testing time: ${ms(now() - totalTime)}`)
log(`Test stats:
- Total: ${stats.total}
- Skip: ${stats.skip}
- Pass: ${stats.pass}
- Fail: ${stats.total - (stats.pass + stats.skip)}
- Assertions: ${stats.assertions}
`)
}
function log (text) {
process.stdout.write(text + '\n')
}
function now () {
const ts = process.hrtime()
return (ts[0] * 1e3) + (ts[1] / 1e6)
}
function parse (data) {
let doc
try {
doc = yaml.load(data, { schema: yaml.CORE_SCHEMA })
} catch (err) {
console.error(err)
return
}
return doc
}
function generateJunitXmlReport (junit, suite) {
writeFileSync(
join(__dirname, '..', '..', `${suite}-report-junit.xml`),
junit.prettyPrint()
)
async function doTestBuilder (version, clientOptions) {
await downloadArtifacts(undefined, version)
const files = await getAllFiles(yamlFolder)
await buildTests(files, clientOptions)
}
if (require.main === module) {
const scheme = process.env.TEST_SUITE === 'platinum' ? 'https' : 'http'
const node = process.env.TEST_ES_SERVER || `${scheme}://elastic:changeme@localhost:9200`
const opts = {
node,
isXPack: process.env.TEST_SUITE !== 'free'
const node = process.env.TEST_ES_SERVER
const apiKey = process.env.ES_API_SECRET_KEY
const password = process.env.ELASTIC_PASSWORD
let version = process.env.STACK_VERSION
assert(node != null, 'Environment variable missing: TEST_ES_SERVER')
assert(apiKey != null || password != null, 'Environment variable missing: ES_API_SECRET_KEY or ELASTIC_PASSWORD')
assert(version != null, 'Environment variable missing: STACK_VERSION')
version = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
const clientOptions = { node }
if (apiKey != null) {
clientOptions.auth = { apiKey }
} else {
clientOptions.auth = { username: 'elastic', password }
}
runner(opts)
}
const shouldSkip = (isXPack, file, name) => {
if (options.suite || options.test) return false
let list = Object.keys(freeSkips)
for (let i = 0; i < list.length; i++) {
const freeTest = freeSkips[list[i]]
for (let j = 0; j < freeTest.length; j++) {
if (file.endsWith(list[i]) && (name === freeTest[j] || freeTest[j] === '*')) {
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
log(`Skipping test ${testName} because it is denylisted in the free test suite`)
return true
}
const nodeUrl = new url.URL(node)
if (nodeUrl.protocol === 'https:') {
clientOptions.tls = {
ca: fs.readFileSync(path.join(__dirname, '..', '..', '.buildkite', 'certs', 'ca.crt'), 'utf8'),
rejectUnauthorized: false
}
}
if (file.includes('x-pack') || isXPack) {
list = Object.keys(platinumDenyList)
for (let i = 0; i < list.length; i++) {
const platTest = platinumDenyList[list[i]]
for (let j = 0; j < platTest.length; j++) {
if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) {
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
log(`Skipping test ${testName} because it is denylisted in the platinum test suite`)
return true
}
}
}
}
return false
doTestBuilder(version, clientOptions)
.then(() => process.exit(0))
.catch(err => {
console.error(err)
process.exit(1)
})
}
const getAllFiles = dir =>
readdirSync(dir).reduce((files, file) => {
const name = join(dir, file)
const isDirectory = statSync(name).isDirectory()
return isDirectory ? [...files, ...getAllFiles(name)] : [...files, name]
}, [])
module.exports = runner


@@ -1,110 +0,0 @@
'use strict'
const assert = require('node:assert')
const { create } = require('xmlbuilder2')
function createJunitReporter () {
const report = {}
return { testsuites, prettyPrint }
function prettyPrint () {
return create(report).end({ prettyPrint: true })
}
function testsuites (name) {
assert(name, 'The testsuites name is required')
assert(report.testsuites === undefined, 'Cannot set more than one testsuites block')
const startTime = Date.now()
report.testsuites = {
'@id': new Date().toISOString(),
'@name': name
}
const testsuiteList = []
return {
testsuite: createTestSuite(testsuiteList),
end () {
report.testsuites['@time'] = Math.round((Date.now() - startTime) / 1000)
report.testsuites['@tests'] = testsuiteList.reduce((acc, val) => {
acc += val['@tests']
return acc
}, 0)
report.testsuites['@failures'] = testsuiteList.reduce((acc, val) => {
acc += val['@failures']
return acc
}, 0)
report.testsuites['@skipped'] = testsuiteList.reduce((acc, val) => {
acc += val['@skipped']
return acc
}, 0)
if (testsuiteList.length) {
report.testsuites.testsuite = testsuiteList
}
}
}
}
function createTestSuite (testsuiteList) {
return function testsuite (name) {
assert(name, 'The testsuite name is required')
const startTime = Date.now()
const suite = {
'@id': new Date().toISOString(),
'@name': name
}
const testcaseList = []
testsuiteList.push(suite)
return {
testcase: createTestCase(testcaseList),
end () {
suite['@time'] = Math.round((Date.now() - startTime) / 1000)
suite['@tests'] = testcaseList.length
suite['@failures'] = testcaseList.filter(t => t.failure).length
suite['@skipped'] = testcaseList.filter(t => t.skipped).length
if (testcaseList.length) {
suite.testcase = testcaseList
}
}
}
}
}
function createTestCase (testcaseList) {
return function testcase (name, file) {
assert(name, 'The testcase name is required')
const startTime = Date.now()
const tcase = {
'@id': new Date().toISOString(),
'@name': name
}
if (file) tcase['@file'] = file
testcaseList.push(tcase)
return {
failure (error) {
assert(error, 'The failure error object is required')
tcase.failure = {
'#': error.stack,
'@message': error.message,
'@type': error.code
}
},
skip (reason) {
if (typeof reason !== 'string') {
reason = JSON.stringify(reason, null, 2)
}
tcase.skipped = {
'#': reason
}
},
end () {
tcase['@time'] = Math.round((Date.now() - startTime) / 1000)
}
}
}
}
}
module.exports = createJunitReporter


@@ -0,0 +1,482 @@
/*
* Copyright Elasticsearch B.V. and contributors
* SPDX-License-Identifier: Apache-2.0
*/
'use strict'
const { join, sep } = require('node:path')
const { readFileSync, writeFileSync, promises } = require('node:fs')
const yaml = require('js-yaml')
const { rimraf } = require('rimraf')
const { mkdir } = promises
const generatedTestsPath = join(__dirname, '..', '..', 'generated-tests')
const stackSkips = [
// test definition bug: response is empty string
'cat/fielddata.yml',
// test definition bug: response is empty string
'cluster/delete_voting_config_exclusions.yml',
// test definition bug: response is empty string
'cluster/voting_config_exclusions.yml',
// client bug: ILM request takes a "body" param, but "body" is a special keyword in the JS client
'ilm/10_basic.yml',
// health report is... not healthy
'health_report.yml',
// TODO: `contains` action only supports checking for primitives inside arrays or strings inside strings, not referenced values like objects inside arrays
'entsearch/10_basic.yml',
// test definition bug: error message does not match
'entsearch/30_sync_jobs_stack.yml',
// no handler found for uri [/knn_test/_knn_search]
'knn_search.yml',
// TODO: fix license on ES startup - "Operation failed: Current license is basic."
'license/10_stack.yml',
// response.body should be truthy. found: ""
'logstash/10_basic.yml',
// test definition bug? security_exception: unable to authenticate user [x_pack_rest_user] for REST request [/_ml/trained_models/test_model/definition/0]
'machine_learning/clear_tm_deployment_cache.yml',
// client bug: 0.99995 does not equal 0.5
'machine_learning/data_frame_evaluate.yml',
// test definition bug? regex has whitespace, maybe needs to be removed
'machine_learning/explain_data_frame_analytics.yml',
// client bug: 4 != 227
'machine_learning/preview_datafeed.yml',
// test definition bug: error message does not match
'machine_learning/revert_model_snapshot.yml',
// test definition bug: error message does not match
'machine_learning/update_model_snapshot.yml',
// version_conflict_engine_exception
'machine_learning/jobs_crud.yml',
// test definition bug: error message does not match
'machine_learning/model_snapshots.yml',
// test definition bug: error message does not match
'query_rules/30_test.yml',
// client bug: 0 != 0.1
'script/10_basic.yml',
// client bug: request takes a "body" param, but "body" is a special keyword in the JS client
'searchable_snapshots/10_basic.yml',
// test builder bug: does `match` action need to support "array contains value"?
'security/10_api_key_basic.yml',
// test definition bug: error message does not match
'security/140_user.yml',
// test definition bug: error message does not match
'security/30_privileges_stack.yml',
// unknown issue: $profile.enabled path doesn't exist in response
'security/130_user_profile.yml',
// test definition bug: error message does not match
'security/change_password.yml',
// test builder bug: media_type_header_exception
'simulate/ingest.yml',
// client bug: request takes a "body" param, but "body" is a special keyword in the JS client
'snapshot/10_basic.yml',
// test definition bug: illegal_argument_exception
'sql/10_basic.yml',
// test definition bug: illegal_argument_exception
'text_structure/10_basic.yml',
// test definition bug: illegal_argument_exception
'transform/10_basic.yml',
]
const serverlessSkips = [
// TODO: sql.getAsync does not set a content-type header but ES expects one
// transport only sets a content-type if the body is not empty
'sql/10_basic.yml',
// TODO: bulk call in setup fails due to "malformed action/metadata line"
// bulk body is being sent as a Buffer, unsure if related.
'transform/10_basic.yml',
// TODO: scripts_painless_execute expects {"result":"0.1"}, gets {"result":"0"}
// body sent as Buffer, unsure if related
'script/10_basic.yml',
// TODO: expects {"outlier_detection.auc_roc.value":0.99995}, gets {"outlier_detection.auc_roc.value":0.5}
// remove if/when https://github.com/elastic/elasticsearch-clients-tests/issues/37 is resolved
'machine_learning/data_frame_evaluate.yml',
// TODO: Cannot perform requested action because job [job-crud-test-apis] is not open
'machine_learning/jobs_crud.yml',
// TODO: test runner needs to support ignoring 410 errors
'enrich/10_basic.yml',
// TODO: parameter `enabled` is not allowed in source
// Same underlying problem as https://github.com/elastic/elasticsearch-clients-tests/issues/55
'cluster/component_templates.yml',
// TODO: expecting `ct_field` field mapping to be returned, but instead only finds `field`
'indices/simulate_template.yml',
'indices/simulate_index_template.yml',
// TODO: test currently times out
'inference/10_basic.yml',
// TODO: Fix: "Trained model deployment [test_model] is not allocated to any nodes"
'machine_learning/20_trained_model_serverless.yml',
// TODO: query_rules api not available yet
'query_rules/10_query_rules.yml',
'query_rules/20_rulesets.yml',
'query_rules/30_test.yml',
// TODO: security.putRole API not available
'security/50_roles_serverless.yml',
// TODO: expected undefined to equal 'some_table'
'entsearch/50_connector_updates.yml',
// TODO: resource_not_found_exception
'tasks_serverless.yml',
]
function parse (data) {
let doc
try {
doc = yaml.load(data, { schema: yaml.CORE_SCHEMA })
} catch (err) {
console.error(err)
return
}
return doc
}
async function build (yamlFiles, clientOptions) {
await rimraf(generatedTestsPath)
await mkdir(generatedTestsPath, { recursive: true })
for (const file of yamlFiles) {
const apiName = file.split(`${sep}tests${sep}`)[1]
const data = readFileSync(file, 'utf8')
const tests = data
.split('\n---\n')
.map(s => s.trim())
// empty strings
.filter(Boolean)
.map(parse)
// null values
.filter(Boolean)
let code = "import { test } from 'tap'\n"
code += "import { Client } from '@elastic/elasticsearch'\n\n"
const requires = tests.find(test => test.requires != null)
let skip = new Set()
if (requires != null) {
const { serverless = true, stack = true } = requires.requires
if (!serverless) skip.add('process.env.TEST_ES_SERVERLESS === "1"')
if (!stack) skip.add('process.env.TEST_ES_STACK === "1"')
}
if (stackSkips.includes(apiName)) skip.add('process.env.TEST_ES_STACK === "1"')
if (serverlessSkips.includes(apiName)) skip.add('process.env.TEST_ES_SERVERLESS === "1"')
if (skip.size > 0) {
code += `test('${apiName}', { skip: ${Array.from(skip).join(' || ')} }, t => {\n`
} else {
code += `test('${apiName}', t => {\n`
}
for (const test of tests) {
if (test.setup != null) {
code += ' t.before(async () => {\n'
code += indent(buildActions(test.setup), 4)
code += ' })\n\n'
}
if (test.teardown != null) {
code += ' t.after(async () => {\n'
code += indent(buildActions(test.teardown), 4)
code += ' })\n\n'
}
for (const key of Object.keys(test).filter(k => !['setup', 'teardown', 'requires'].includes(k))) {
if (test[key].find(action => Object.keys(action)[0] === 'skip') != null) {
code += ` t.test('${key}', { skip: true }, async t => {\n`
} else {
code += ` t.test('${key}', async t => {\n`
}
code += indent(buildActions(test[key]), 4)
code += '\n t.end()\n'
code += ' })\n'
}
// if (test.requires != null) requires = test.requires
}
code += '\n t.end()\n'
code += '})\n'
const testDir = join(generatedTestsPath, apiName.split(sep).slice(0, -1).join(sep))
const testFile = join(testDir, apiName.split(sep).pop().replace(/\.ya?ml$/, '.mjs'))
await mkdir(testDir, { recursive: true })
writeFileSync(testFile, code, 'utf8')
}
function buildActions (actions) {
let code = `const client = new Client(${JSON.stringify(clientOptions, null, 2)})\n`
code += 'let response\n\n'
const vars = new Set()
for (const action of actions) {
const key = Object.keys(action)[0]
switch (key) {
case 'do':
code += buildDo(action.do)
break
case 'set':
const setResult = buildSet(action.set, vars)
vars.add(setResult.varName)
code += setResult.code
break
case 'transform_and_set':
code += buildTransformAndSet(action.transform_and_set)
break
case 'match':
code += buildMatch(action.match)
break
case 'lt':
code += buildLt(action.lt)
break
case 'lte':
code += buildLte(action.lte)
break
case 'gt':
code += buildGt(action.gt)
break
case 'gte':
code += buildGte(action.gte)
break
case 'length':
code += buildLength(action.length)
break
case 'is_true':
code += buildIsTrue(action.is_true)
break
case 'is_false':
code += buildIsFalse(action.is_false)
break
case 'contains':
code += buildContains(action.contains)
break
case 'exists':
code += buildExists(action.exists)
break
case 'skip':
break
default:
console.warn(`Action not supported: ${key}`)
break
}
}
return code
}
}
function buildDo (action) {
let code = ''
const keys = Object.keys(action)
if (keys.includes('catch')) {
code += 'try {\n'
code += indent(buildRequest(action), 2)
code += '} catch (err) {\n'
code += ` t.match(err.toString(), ${buildValLiteral(action.catch)})\n`
code += '}\n'
} else {
code += buildRequest(action)
}
return code
}
function buildRequest (action) {
let code = ''
const options = { meta: true }
for (const key of Object.keys(action)) {
if (key === 'catch') continue
if (key === 'headers') {
options.headers = action.headers
continue
}
const params = action[key]
if (params.ignore != null) {
if (Array.isArray(params.ignore)) {
options.ignore = params.ignore
} else {
options.ignore = [params.ignore]
}
}
code += `response = await client.${toCamelCase(key)}(${buildApiParams(action[key])}, ${JSON.stringify(options)})\n`
}
return code
}
function buildSet (action, vars) {
const key = Object.keys(action)[0]
const varName = action[key]
const lookup = buildLookup(key)
let code = ''
if (vars.has(varName)) {
code = `${varName} = ${lookup}\n`
} else {
code = `let ${varName} = ${lookup}\n`
}
return { code, varName }
}
function buildTransformAndSet (action) {
return `// TODO buildTransformAndSet: ${JSON.stringify(action)}\n`
}
function buildMatch (action) {
const key = Object.keys(action)[0]
let lookup = buildLookup(key)
const val = buildValLiteral(action[key])
return `t.match(${lookup}, ${val})\n`
}
function buildLt (action) {
const key = Object.keys(action)[0]
const lookup = buildLookup(key)
const val = buildValLiteral(action[key])
return `t.ok(${lookup} < ${val})\n`
}
function buildLte (action) {
const key = Object.keys(action)[0]
const lookup = buildLookup(key)
const val = buildValLiteral(action[key])
return `t.ok(${lookup} <= ${val})\n`
}
function buildGt (action) {
const key = Object.keys(action)[0]
const lookup = buildLookup(key)
const val = buildValLiteral(action[key])
return `t.ok(${lookup} > ${val})\n`
}
function buildGte (action) {
const key = Object.keys(action)[0]
const lookup = buildLookup(key)
const val = buildValLiteral(action[key])
return `t.ok(${lookup} >= ${val})\n`
}
function buildLength (action) {
const key = Object.keys(action)[0]
const lookup = buildLookup(key)
const val = buildValLiteral(action[key])
let code = ''
code += `if (typeof ${lookup} === 'object' && !Array.isArray(${lookup})) {\n`
code += ` t.equal(Object.keys(${lookup}).length, ${val})\n`
code += `} else {\n`
code += ` t.equal(${lookup}.length, ${val})\n`
code += `}\n`
return code
}
function buildIsTrue (action) {
let lookup = `${buildLookup(action)}`
let errMessage = `\`${action} should be truthy. found: '\$\{JSON.stringify(${lookup})\}'\``
if (lookup.includes('JSON.stringify')) errMessage = `\`${action} should be truthy. found: '\$\{${lookup}\}'\``
return `t.ok(${lookup} === "true" || (Boolean(${lookup}) && ${lookup} !== "false"), ${errMessage})\n`
}
function buildIsFalse (action) {
let lookup = `${buildLookup(action)}`
let errMessage = `\`${action} should be falsy. found: '\$\{JSON.stringify(${lookup})\}'\``
if (lookup.includes('JSON.stringify')) errMessage = `\`${action} should be falsy. found: '\$\{${lookup}\}'\``
return `t.ok(${lookup} === "false" || !Boolean(${lookup}), ${errMessage})\n`
}
function buildContains (action) {
const key = Object.keys(action)[0]
const lookup = buildLookup(key)
const val = buildValLiteral(action[key])
return `t.ok(${lookup}.includes(${val}), '${JSON.stringify(val)} not found in ${key}')\n`
}
function buildExists (keyName) {
const lookup = buildLookup(keyName)
return `t.ok(${lookup} != null, \`Key "${keyName}" not found in response body: \$\{JSON.stringify(response.body, null, 2)\}\`)\n`
}
function buildApiParams (params) {
if (Object.keys(params).length === 0) {
return 'undefined'
} else {
const out = {}
Object.keys(params).filter(k => k !== 'ignore' && k !== 'headers').forEach(k => out[k] = params[k])
return buildValLiteral(out)
}
}
function toCamelCase (name) {
return name.replace(/_([a-z])/g, g => g[1].toUpperCase())
}
function indent (str, spaces) {
const tabs = ' '.repeat(spaces)
return str.replace(/\s+$/, '').split('\n').map(l => `${tabs}${l}`).join('\n') + '\n'
}
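// Convert a YAML lookup path into a response-body accessor string.
// Illustrative examples (not from the diff):
//   'items.0.index.status' -> "response.body['items'][0]['index']['status']"
//   '$body'                -> the (stringified) response body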
function buildLookup (path) {
if (path === '$body') return '(typeof response.body === "string" ? response.body : JSON.stringify(response.body))'
const outPath = path.split('.').map(step => {
if (parseInt(step, 10).toString() === step) {
return `[${step}]`
} else if (step.match(/^\$[a-zA-Z0-9_]+$/)) {
const lookup = step.replace(/^\$/, '')
if (lookup === 'body') return ''
return `[${lookup}]`
} else if (step === '') {
return ''
} else {
return `['${step}']`
}
}).join('')
return `response.body${outPath}`
}
function buildValLiteral (val) {
if (typeof val === 'string') val = val.trim()
if (isRegExp(val)) {
return JSON.stringify(val).replace(/^"/, '').replace(/"$/, '').replaceAll('\\\\', '\\')
} else if (isVariable(val)) {
if (val === '$body') return 'JSON.stringify(response.body)'
return val.replace(/^\$/, '')
} else if (isPlainObject(val)) {
return JSON.stringify(cleanObject(val), null, 2).replace(/"\$([a-zA-Z0-9_]+)"/g, '$1')
} else {
return JSON.stringify(val)
}
}
function isRegExp (str) {
return typeof str === 'string' && str.startsWith('/') && str.endsWith('/')
}
function isVariable (str) {
return typeof str === 'string' && str.match(/^\$[a-zA-Z0-9_]+$/) != null
}
function cleanObject (obj) {
Object.keys(obj).forEach(key => {
let val = obj[key]
if (typeof val === 'string' && val.trim().startsWith('{') && val.trim().endsWith('}')) {
// attempt to parse as object
try {
val = JSON.parse(val)
} catch {
}
} else if (isPlainObject(val)) {
val = cleanObject(val)
} else if (Array.isArray(val)) {
val = val.map(item => isPlainObject(item) ? cleanObject(item) : item)
}
obj[key] = val
})
return obj
}
function isPlainObject (obj) {
return typeof obj === 'object' && !Array.isArray(obj) && obj != null
}
module.exports = build
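// Usage sketch (illustrative): mirrors doTestBuilder in test/integration/index.js.
//
//   const buildTests = require('./test-builder')
//   const files = ['yaml-rest-tests/tests/indices/create.yml'] // hypothetical path
//   buildTests(files, { node: 'http://localhost:9200' })
//     .then(() => console.log('generated tap tests written to generated-tests/'))
//     .catch(console.error)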

File diff suppressed because it is too large


@@ -77,6 +77,31 @@ test('Missing node(s)', t => {
t.end()
})
test('multi nodes with roles, using default node filter', async t => {
const client = new Client({
nodes: [
{
url: new URL('http://node1:9200'),
roles: { master: true, data: false, ingest: false, ml: false }
},
{
url: new URL('http://node2:9200'),
roles: { master: true, data: true, ingest: false, ml: false }
},
]
})
const conn = client.connectionPool.getConnection({
now: Date.now() + 1000 * 60 * 3,
requestId: 1,
name: 'elasticsearch-js',
context: null
})
t.equal(conn?.url.hostname, 'node2')
t.end()
})
test('Custom headers', t => {
const client = new Client({
node: 'http://localhost:9200',


@@ -172,17 +172,28 @@ test('ES|QL helper', t => {
t.end()
})
test('toArrowReader', t => {
t.test('Parses a binary response into an Arrow stream reader', async t => {
const binaryContent = '/////zABAAAQAAAAAAAKAA4ABgANAAgACgAAAAAABAAQAAAAAAEKAAwAAAAIAAQACgAAAAgAAAAIAAAAAAAAAAIAAAB8AAAABAAAAJ7///8UAAAARAAAAEQAAAAAAAoBRAAAAAEAAAAEAAAAjP///wgAAAAQAAAABAAAAGRhdGUAAAAADAAAAGVsYXN0aWM6dHlwZQAAAAAAAAAAgv///wAAAQAEAAAAZGF0ZQAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABMAAAAVAAAAAAAAwFUAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAYAAABkb3VibGUAAAwAAABlbGFzdGljOnR5cGUAAAAAAAAAAAAABgAIAAYABgAAAAAAAgAGAAAAYW1vdW50AAAAAAAA/////7gAAAAUAAAAAAAAAAwAFgAOABUAEAAEAAwAAABgAAAAAAAAAAAABAAQAAAAAAMKABgADAAIAAQACgAAABQAAABYAAAABQAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAQAAAAAAAAAIAAAAAAAAACgAAAAAAAAAMAAAAAAAAAABAAAAAAAAADgAAAAAAAAAKAAAAAAAAAAAAAAAAgAAAAUAAAAAAAAAAAAAAAAAAAAFAAAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAACgmZkTQAAAAGBmZiBAAAAAAAAAL0AAAADAzMwjQAAAAMDMzCtAHwAAAAAAAADV6yywkgEAANWPBquSAQAA1TPgpZIBAADV17mgkgEAANV7k5uSAQAA/////wAAAAA='
test('toArrowReader', async t => {
const testRecords = [
{ amount: 4.900000095367432, },
{ amount: 8.199999809265137, },
{ amount: 15.5, },
{ amount: 9.899999618530273, },
{ amount: 13.899999618530273, },
]
// build reusable Arrow table
const table = arrow.tableFromJSON(testRecords)
const rawData = await arrow.RecordBatchStreamWriter.writeAll(table).toUint8Array()
t.test('Parses a binary response into an Arrow stream reader', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (_params) {
return {
body: Buffer.from(binaryContent, 'base64'),
body: Buffer.from(rawData),
statusCode: 200,
headers: {
'content-type': 'application/vnd.elasticsearch+arrow+stream'
'content-type': 'application/vnd.elasticsearch+arrow+stream',
'transfer-encoding': 'chunked'
}
}
}
@@ -196,26 +207,28 @@ test('ES|QL helper', t => {
const result = await client.helpers.esql({ query: 'FROM sample_data' }).toArrowReader()
t.ok(result.isStream())
const recordBatch = result.next().value
t.same(recordBatch.get(0)?.toJSON(), {
amount: 4.900000095367432,
date: 1729532586965,
})
let count = 0
for await (const recordBatch of result) {
for (const record of recordBatch) {
t.same(record.toJSON(), testRecords[count])
count++
}
}
t.end()
})
t.test('ESQL helper uses correct x-elastic-client-meta helper value', async t => {
const binaryContent = '/////zABAAAQAAAAAAAKAA4ABgANAAgACgAAAAAABAAQAAAAAAEKAAwAAAAIAAQACgAAAAgAAAAIAAAAAAAAAAIAAAB8AAAABAAAAJ7///8UAAAARAAAAEQAAAAAAAoBRAAAAAEAAAAEAAAAjP///wgAAAAQAAAABAAAAGRhdGUAAAAADAAAAGVsYXN0aWM6dHlwZQAAAAAAAAAAgv///wAAAQAEAAAAZGF0ZQAAEgAYABQAEwASAAwAAAAIAAQAEgAAABQAAABMAAAAVAAAAAAAAwFUAAAAAQAAAAwAAAAIAAwACAAEAAgAAAAIAAAAEAAAAAYAAABkb3VibGUAAAwAAABlbGFzdGljOnR5cGUAAAAAAAAAAAAABgAIAAYABgAAAAAAAgAGAAAAYW1vdW50AAAAAAAA/////7gAAAAUAAAAAAAAAAwAFgAOABUAEAAEAAwAAABgAAAAAAAAAAAABAAQAAAAAAMKABgADAAIAAQACgAAABQAAABYAAAABQAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAQAAAAAAAAAIAAAAAAAAACgAAAAAAAAAMAAAAAAAAAABAAAAAAAAADgAAAAAAAAAKAAAAAAAAAAAAAAAAgAAAAUAAAAAAAAAAAAAAAAAAAAFAAAAAAAAAAAAAAAAAAAAHwAAAAAAAAAAAACgmZkTQAAAAGBmZiBAAAAAAAAAL0AAAADAzMwjQAAAAMDMzCtAHwAAAAAAAADV6yywkgEAANWPBquSAQAA1TPgpZIBAADV17mgkgEAANV7k5uSAQAA/////wAAAAA='
const MockConnection = connection.buildMockConnection({
onRequest (params) {
const header = params.headers?.['x-elastic-client-meta'] ?? ''
t.ok(header.includes('h=qa'), `Client meta header does not include ESQL helper value: ${header}`)
return {
body: Buffer.from(binaryContent, 'base64'),
body: Buffer.from(rawData),
statusCode: 200,
headers: {
'content-type': 'application/vnd.elasticsearch+arrow+stream'
'content-type': 'application/vnd.elasticsearch+arrow+stream',
'transfer-encoding': 'chunked'
}
}
}
@@ -254,10 +267,12 @@ test('ES|QL helper', t => {
new arrow.RecordBatch(schema, batch3.data),
])
const rawData = await arrow.RecordBatchStreamWriter.writeAll(table).toUint8Array()
const MockConnection = connection.buildMockConnection({
onRequest (_params) {
return {
body: Buffer.from(arrow.tableToIPC(table, "stream")),
body: Buffer.from(rawData),
statusCode: 200,
headers: {
'content-type': 'application/vnd.elasticsearch+arrow+stream'
@@ -275,7 +290,7 @@ test('ES|QL helper', t => {
t.ok(result.isStream())
let counter = 0
for (const batch of result) {
for await (const batch of result) {
for (const row of batch) {
counter++
const { id, val } = row.toJSON()