Migrate integration tests to built JS files (#2750)
This commit is contained in:
@ -10,436 +10,63 @@ process.on('unhandledRejection', function (err) {
|
||||
process.exit(1)
|
||||
})
|
||||
|
||||
const { writeFileSync, readFileSync, readdirSync, statSync } = require('fs')
|
||||
const { join, sep } = require('path')
|
||||
const yaml = require('js-yaml')
|
||||
const minimist = require('minimist')
|
||||
const ms = require('ms')
|
||||
const { Client } = require('../../index')
|
||||
const build = require('./test-runner')
|
||||
const { sleep } = require('./helper')
|
||||
const createJunitReporter = require('./reporter')
|
||||
const assert = require('node:assert')
|
||||
const url = require('node:url')
|
||||
const fs = require('node:fs')
|
||||
const path = require('node:path')
|
||||
const globby = require('globby')
|
||||
const semver = require('semver')
|
||||
const downloadArtifacts = require('../../scripts/download-artifacts')
|
||||
|
||||
const yamlFolder = downloadArtifacts.locations.freeTestFolder
|
||||
const xPackYamlFolder = downloadArtifacts.locations.xPackTestFolder
|
||||
const buildTests = require('./test-builder')
|
||||
|
||||
const MAX_API_TIME = 1000 * 90
|
||||
const MAX_FILE_TIME = 1000 * 30
|
||||
const MAX_TEST_TIME = 1000 * 3
|
||||
const yamlFolder = downloadArtifacts.locations.testYamlFolder
|
||||
|
||||
const options = minimist(process.argv.slice(2), {
|
||||
boolean: ['bail'],
|
||||
string: ['suite', 'test']
|
||||
})
|
||||
|
||||
const freeSkips = {
|
||||
// working on fixes for these
|
||||
'/free/aggregations/bucket_selector.yml': ['bad script'],
|
||||
'/free/aggregations/bucket_script.yml': ['bad script'],
|
||||
|
||||
// either the YAML test definition is wrong, or this fails because JSON.stringify is coercing "1.0" to "1"
|
||||
'/free/aggregations/percentiles_bucket.yml': ['*'],
|
||||
|
||||
// not supported yet
|
||||
'/free/cluster.desired_nodes/10_basic.yml': ['*'],
|
||||
|
||||
// Cannot find methods on `Internal` object
|
||||
'/free/cluster.desired_balance/10_basic.yml': ['*'],
|
||||
'/free/cluster.desired_nodes/20_dry_run.yml': ['*'],
|
||||
'/free/cluster.prevalidate_node_removal/10_basic.yml': ['*'],
|
||||
|
||||
// the v8 client never sends the scroll_id in querystring,
|
||||
// the way the test is structured causes a security exception
|
||||
'free/scroll/10_basic.yml': ['Body params override query string'],
|
||||
'free/scroll/11_clear.yml': [
|
||||
'Body params with array param override query string',
|
||||
'Body params with string param scroll id override query string'
|
||||
],
|
||||
'free/cat.allocation/10_basic.yml': ['*'],
|
||||
'free/cat.snapshots/10_basic.yml': ['Test cat snapshots output'],
|
||||
|
||||
'indices.stats/50_disk_usage.yml': ['Disk usage stats'],
|
||||
'indices.stats/60_field_usage.yml': ['Field usage stats'],
|
||||
|
||||
// skipping because we are booting ES with `discovery.type=single-node`
|
||||
// and this test will fail because of this configuration
|
||||
'nodes.stats/30_discovery.yml': ['*'],
|
||||
|
||||
// the expected error is returning a 503,
|
||||
// which triggers a retry and the node to be marked as dead
|
||||
'search.aggregation/240_max_buckets.yml': ['*'],
|
||||
|
||||
// long values and json do not play nicely together
|
||||
'search.aggregation/40_range.yml': ['Min and max long range bounds'],
|
||||
|
||||
// the yaml runner assumes that null means "does not exists",
|
||||
// while null is a valid json value, so the check will fail
|
||||
'search/320_disallow_queries.yml': ['Test disallow expensive queries'],
|
||||
'free/tsdb/90_unsupported_operations.yml': ['noop update']
|
||||
}
|
||||
|
||||
const platinumDenyList = {
|
||||
'api_key/10_basic.yml': ['Test get api key'],
|
||||
'api_key/20_query.yml': ['*'],
|
||||
'api_key/11_invalidation.yml': ['Test invalidate api key by realm name'],
|
||||
'analytics/histogram.yml': ['Histogram requires values in increasing order'],
|
||||
|
||||
// object keys must me strings, and `0.0.toString()` is `0`
|
||||
'ml/evaluate_data_frame.yml': [
|
||||
'Test binary_soft_classifition precision',
|
||||
'Test binary_soft_classifition recall',
|
||||
'Test binary_soft_classifition confusion_matrix'
|
||||
],
|
||||
|
||||
// The cleanup fails with a index not found when retrieving the jobs
|
||||
'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'],
|
||||
'ml/bucket_correlation_agg.yml': ['Test correlation bucket agg simple'],
|
||||
|
||||
// start should be a string
|
||||
'ml/jobs_get_result_overall_buckets.yml': ['Test overall buckets given epoch start and end params'],
|
||||
|
||||
// this can't happen with the client
|
||||
'ml/start_data_frame_analytics.yml': ['Test start with inconsistent body/param ids'],
|
||||
'ml/stop_data_frame_analytics.yml': ['Test stop with inconsistent body/param ids'],
|
||||
'ml/preview_datafeed.yml': ['*'],
|
||||
|
||||
// Investigate why is failing
|
||||
'ml/inference_crud.yml': ['*'],
|
||||
'ml/categorization_agg.yml': ['Test categorization aggregation with poor settings'],
|
||||
'ml/filter_crud.yml': ['*'],
|
||||
|
||||
// investigate why this is failing
|
||||
'monitoring/bulk/10_basic.yml': ['*'],
|
||||
'monitoring/bulk/20_privileges.yml': ['*'],
|
||||
'license/20_put_license.yml': ['*'],
|
||||
'snapshot/10_basic.yml': ['*'],
|
||||
'snapshot/20_operator_privileges_disabled.yml': ['*'],
|
||||
|
||||
// the body is correct, but the regex is failing
|
||||
'sql/sql.yml': ['Getting textual representation'],
|
||||
'searchable_snapshots/10_usage.yml': ['*'],
|
||||
'service_accounts/10_basic.yml': ['*'],
|
||||
|
||||
// we are setting two certificates in the docker config
|
||||
'ssl/10_basic.yml': ['*'],
|
||||
'token/10_basic.yml': ['*'],
|
||||
'token/11_invalidation.yml': ['*'],
|
||||
|
||||
// very likely, the index template has not been loaded yet.
|
||||
// we should run a indices.existsTemplate, but the name of the
|
||||
// template may vary during time.
|
||||
'transforms_crud.yml': [
|
||||
'Test basic transform crud',
|
||||
'Test transform with query and array of indices in source',
|
||||
'Test PUT continuous transform',
|
||||
'Test PUT continuous transform without delay set'
|
||||
],
|
||||
'transforms_force_delete.yml': [
|
||||
'Test force deleting a running transform'
|
||||
],
|
||||
'transforms_cat_apis.yml': ['*'],
|
||||
'transforms_start_stop.yml': ['*'],
|
||||
'transforms_stats.yml': ['*'],
|
||||
'transforms_stats_continuous.yml': ['*'],
|
||||
'transforms_update.yml': ['*'],
|
||||
|
||||
// js does not support ulongs
|
||||
'unsigned_long/10_basic.yml': ['*'],
|
||||
'unsigned_long/20_null_value.yml': ['*'],
|
||||
'unsigned_long/30_multi_fields.yml': ['*'],
|
||||
'unsigned_long/40_different_numeric.yml': ['*'],
|
||||
'unsigned_long/50_script_values.yml': ['*'],
|
||||
|
||||
// the v8 client flattens the body into the parent object
|
||||
'platinum/users/10_basic.yml': ['Test put user with different username in body'],
|
||||
|
||||
// docker issue?
|
||||
'watcher/execute_watch/60_http_input.yml': ['*'],
|
||||
|
||||
// the checks are correct, but for some reason the test is failing on js side
|
||||
// I bet is because the backslashes in the rg
|
||||
'watcher/execute_watch/70_invalid.yml': ['*'],
|
||||
'watcher/put_watch/10_basic.yml': ['*'],
|
||||
'xpack/15_basic.yml': ['*'],
|
||||
|
||||
// test that are failing that needs to be investigated
|
||||
// the error cause can either be in the yaml test or in the specification
|
||||
|
||||
// start should be a string in the yaml test
|
||||
'platinum/ml/delete_job_force.yml': ['Test force delete an open job that is referred by a started datafeed'],
|
||||
'platinum/ml/evaluate_data_frame.yml': ['*'],
|
||||
'platinum/ml/get_datafeed_stats.yml': ['*'],
|
||||
|
||||
// start should be a string in the yaml test
|
||||
'platinum/ml/start_stop_datafeed.yml': ['*']
|
||||
}
|
||||
|
||||
function runner (opts = {}) {
|
||||
const options = { node: opts.node }
|
||||
if (opts.isXPack) {
|
||||
options.tls = {
|
||||
ca: readFileSync(join(__dirname, '..', '..', '.buildkite', 'certs', 'ca.crt'), 'utf8'),
|
||||
rejectUnauthorized: false
|
||||
const getAllFiles = async dir => {
|
||||
const files = await globby(dir, {
|
||||
expandDirectories: {
|
||||
extensions: ['yml', 'yaml']
|
||||
}
|
||||
}
|
||||
const client = new Client(options)
|
||||
log('Loading yaml suite')
|
||||
start({ client, isXPack: opts.isXPack })
|
||||
.catch(err => {
|
||||
if (err.name === 'ResponseError') {
|
||||
console.error(err)
|
||||
console.log(JSON.stringify(err.meta, null, 2))
|
||||
} else {
|
||||
console.error(err)
|
||||
}
|
||||
process.exit(1)
|
||||
})
|
||||
})
|
||||
return files.sort()
|
||||
}
|
||||
|
||||
async function waitCluster (client, times = 0) {
|
||||
try {
|
||||
await client.cluster.health({ wait_for_status: 'green', timeout: '50s' })
|
||||
} catch (err) {
|
||||
if (++times < 10) {
|
||||
await sleep(5000)
|
||||
return waitCluster(client, times)
|
||||
}
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
async function start ({ client, isXPack }) {
|
||||
log('Waiting for Elasticsearch')
|
||||
await waitCluster(client)
|
||||
|
||||
const body = await client.info()
|
||||
const { number: version, build_hash: hash } = body.version
|
||||
|
||||
log(`Downloading artifacts for hash ${hash}...`)
|
||||
await downloadArtifacts({ hash, version })
|
||||
|
||||
log(`Testing ${isXPack ? 'Platinum' : 'Free'} api...`)
|
||||
const junit = createJunitReporter()
|
||||
const junitTestSuites = junit.testsuites(`Integration test for ${isXPack ? 'Platinum' : 'Free'} api`)
|
||||
|
||||
const stats = {
|
||||
total: 0,
|
||||
skip: 0,
|
||||
pass: 0,
|
||||
assertions: 0
|
||||
}
|
||||
const folders = getAllFiles(isXPack ? xPackYamlFolder : yamlFolder)
|
||||
.filter(t => !/(README|TODO)/g.test(t))
|
||||
// we cluster the array based on the folder names,
|
||||
// to provide a better test log output
|
||||
.reduce((arr, file) => {
|
||||
const path = file.slice(file.indexOf('/rest-api-spec/test'), file.lastIndexOf('/'))
|
||||
let inserted = false
|
||||
for (let i = 0; i < arr.length; i++) {
|
||||
if (arr[i][0].includes(path)) {
|
||||
inserted = true
|
||||
arr[i].push(file)
|
||||
break
|
||||
}
|
||||
}
|
||||
if (!inserted) arr.push([file])
|
||||
return arr
|
||||
}, [])
|
||||
|
||||
const totalTime = now()
|
||||
for (const folder of folders) {
|
||||
// pretty name
|
||||
const apiName = folder[0].slice(
|
||||
folder[0].indexOf(`${sep}rest-api-spec${sep}test`) + 19,
|
||||
folder[0].lastIndexOf(sep)
|
||||
)
|
||||
|
||||
log('Testing ' + apiName.slice(1))
|
||||
const apiTime = now()
|
||||
|
||||
for (const file of folder) {
|
||||
const testRunner = build({
|
||||
client,
|
||||
version,
|
||||
isXPack: file.includes('platinum')
|
||||
})
|
||||
const fileTime = now()
|
||||
const data = readFileSync(file, 'utf8')
|
||||
// get the test yaml (as object), some file has multiple yaml documents inside,
|
||||
// every document is separated by '---', so we split on the separator
|
||||
// and then we remove the empty strings, finally we parse them
|
||||
const tests = data
|
||||
.split('\n---\n')
|
||||
.map(s => s.trim())
|
||||
// empty strings
|
||||
.filter(Boolean)
|
||||
.map(parse)
|
||||
// null values
|
||||
.filter(Boolean)
|
||||
|
||||
// get setup and teardown if present
|
||||
let setupTest = null
|
||||
let teardownTest = null
|
||||
for (const test of tests) {
|
||||
if (test.setup) setupTest = test.setup
|
||||
if (test.teardown) teardownTest = test.teardown
|
||||
}
|
||||
|
||||
const cleanPath = file.slice(file.lastIndexOf(apiName))
|
||||
|
||||
// skip if --suite CLI arg doesn't match
|
||||
if (options.suite && !cleanPath.endsWith(options.suite)) continue
|
||||
|
||||
log(' ' + cleanPath)
|
||||
const junitTestSuite = junitTestSuites.testsuite(apiName.slice(1) + ' - ' + cleanPath)
|
||||
|
||||
for (const test of tests) {
|
||||
const testTime = now()
|
||||
const name = Object.keys(test)[0]
|
||||
|
||||
// skip setups, teardowns and anything that doesn't match --test flag when present
|
||||
if (name === 'setup' || name === 'teardown') continue
|
||||
if (options.test && !name.endsWith(options.test)) continue
|
||||
|
||||
const junitTestCase = junitTestSuite.testcase(name, `node_${process.version}: ${cleanPath}`)
|
||||
|
||||
stats.total += 1
|
||||
if (shouldSkip(isXPack, file, name)) {
|
||||
stats.skip += 1
|
||||
junitTestCase.skip('This test is in the skip list of the client')
|
||||
junitTestCase.end()
|
||||
continue
|
||||
}
|
||||
log(' - ' + name)
|
||||
try {
|
||||
await testRunner.run(setupTest, test[name], teardownTest, stats, junitTestCase)
|
||||
stats.pass += 1
|
||||
} catch (err) {
|
||||
junitTestCase.failure(err)
|
||||
junitTestCase.end()
|
||||
junitTestSuite.end()
|
||||
junitTestSuites.end()
|
||||
generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
|
||||
err.meta = JSON.stringify(err.meta ?? {}, null, 2)
|
||||
console.error(err)
|
||||
|
||||
if (options.bail) {
|
||||
process.exit(1)
|
||||
} else {
|
||||
continue
|
||||
}
|
||||
}
|
||||
const totalTestTime = now() - testTime
|
||||
junitTestCase.end()
|
||||
if (totalTestTime > MAX_TEST_TIME) {
|
||||
log(' took too long: ' + ms(totalTestTime))
|
||||
} else {
|
||||
log(' took: ' + ms(totalTestTime))
|
||||
}
|
||||
}
|
||||
junitTestSuite.end()
|
||||
const totalFileTime = now() - fileTime
|
||||
if (totalFileTime > MAX_FILE_TIME) {
|
||||
log(` ${cleanPath} took too long: ` + ms(totalFileTime))
|
||||
} else {
|
||||
log(` ${cleanPath} took: ` + ms(totalFileTime))
|
||||
}
|
||||
}
|
||||
const totalApiTime = now() - apiTime
|
||||
if (totalApiTime > MAX_API_TIME) {
|
||||
log(`${apiName} took too long: ` + ms(totalApiTime))
|
||||
} else {
|
||||
log(`${apiName} took: ` + ms(totalApiTime))
|
||||
}
|
||||
}
|
||||
junitTestSuites.end()
|
||||
generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
|
||||
log(`Total testing time: ${ms(now() - totalTime)}`)
|
||||
log(`Test stats:
|
||||
- Total: ${stats.total}
|
||||
- Skip: ${stats.skip}
|
||||
- Pass: ${stats.pass}
|
||||
- Fail: ${stats.total - (stats.pass + stats.skip)}
|
||||
- Assertions: ${stats.assertions}
|
||||
`)
|
||||
}
|
||||
|
||||
function log (text) {
|
||||
process.stdout.write(text + '\n')
|
||||
}
|
||||
|
||||
function now () {
|
||||
const ts = process.hrtime()
|
||||
return (ts[0] * 1e3) + (ts[1] / 1e6)
|
||||
}
|
||||
|
||||
function parse (data) {
|
||||
let doc
|
||||
try {
|
||||
doc = yaml.load(data, { schema: yaml.CORE_SCHEMA })
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
function generateJunitXmlReport (junit, suite) {
|
||||
writeFileSync(
|
||||
join(__dirname, '..', '..', `${suite}-report-junit.xml`),
|
||||
junit.prettyPrint()
|
||||
)
|
||||
async function doTestBuilder (version, clientOptions) {
|
||||
await downloadArtifacts(undefined, version)
|
||||
const files = await getAllFiles(yamlFolder)
|
||||
await buildTests(files, clientOptions)
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
const scheme = process.env.TEST_SUITE === 'platinum' ? 'https' : 'http'
|
||||
const node = process.env.TEST_ES_SERVER || `${scheme}://elastic:changeme@localhost:9200`
|
||||
const opts = {
|
||||
node,
|
||||
isXPack: process.env.TEST_SUITE !== 'free'
|
||||
const node = process.env.TEST_ES_SERVER
|
||||
const apiKey = process.env.ES_API_SECRET_KEY
|
||||
const password = process.env.ELASTIC_PASSWORD
|
||||
let version = process.env.STACK_VERSION
|
||||
|
||||
assert(node != null, 'Environment variable missing: TEST_ES_SERVER')
|
||||
assert(apiKey != null || password != null, 'Environment variable missing: ES_API_SECRET_KEY or ELASTIC_PASSWORD')
|
||||
assert(version != null, 'Environment variable missing: STACK_VERSION')
|
||||
|
||||
version = semver.clean(version.includes('SNAPSHOT') ? version.split('-')[0] : version)
|
||||
|
||||
const clientOptions = { node }
|
||||
if (apiKey != null) {
|
||||
clientOptions.auth = { apiKey }
|
||||
} else {
|
||||
clientOptions.auth = { username: 'elastic', password }
|
||||
}
|
||||
runner(opts)
|
||||
}
|
||||
|
||||
const shouldSkip = (isXPack, file, name) => {
|
||||
if (options.suite || options.test) return false
|
||||
|
||||
let list = Object.keys(freeSkips)
|
||||
for (let i = 0; i < list.length; i++) {
|
||||
const freeTest = freeSkips[list[i]]
|
||||
for (let j = 0; j < freeTest.length; j++) {
|
||||
if (file.endsWith(list[i]) && (name === freeTest[j] || freeTest[j] === '*')) {
|
||||
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
|
||||
log(`Skipping test ${testName} because it is denylisted in the free test suite`)
|
||||
return true
|
||||
}
|
||||
const nodeUrl = new url.URL(node)
|
||||
if (nodeUrl.protocol === 'https:') {
|
||||
clientOptions.tls = {
|
||||
ca: fs.readFileSync(path.join(__dirname, '..', '..', '.buildkite', 'certs', 'ca.crt'), 'utf8'),
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
}
|
||||
|
||||
if (file.includes('x-pack') || isXPack) {
|
||||
list = Object.keys(platinumDenyList)
|
||||
for (let i = 0; i < list.length; i++) {
|
||||
const platTest = platinumDenyList[list[i]]
|
||||
for (let j = 0; j < platTest.length; j++) {
|
||||
if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) {
|
||||
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
|
||||
log(`Skipping test ${testName} because it is denylisted in the platinum test suite`)
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
doTestBuilder(version, clientOptions)
|
||||
.then(() => process.exit(0))
|
||||
.catch(err => {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
}
|
||||
|
||||
const getAllFiles = dir =>
|
||||
readdirSync(dir).reduce((files, file) => {
|
||||
const name = join(dir, file)
|
||||
const isDirectory = statSync(name).isDirectory()
|
||||
return isDirectory ? [...files, ...getAllFiles(name)] : [...files, name]
|
||||
}, [])
|
||||
|
||||
module.exports = runner
|
||||
|
||||
@ -1,115 +0,0 @@
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and contributors
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const assert = require('node:assert')
|
||||
const { create } = require('xmlbuilder2')
|
||||
|
||||
function createJunitReporter () {
|
||||
const report = {}
|
||||
|
||||
return { testsuites, prettyPrint }
|
||||
|
||||
function prettyPrint () {
|
||||
return create(report).end({ prettyPrint: true })
|
||||
}
|
||||
|
||||
function testsuites (name) {
|
||||
assert(name, 'The testsuites name is required')
|
||||
assert(report.testsuites === undefined, 'Cannot set more than one testsuites block')
|
||||
const startTime = Date.now()
|
||||
|
||||
report.testsuites = {
|
||||
'@id': new Date().toISOString(),
|
||||
'@name': name
|
||||
}
|
||||
|
||||
const testsuiteList = []
|
||||
|
||||
return {
|
||||
testsuite: createTestSuite(testsuiteList),
|
||||
end () {
|
||||
report.testsuites['@time'] = Math.round((Date.now() - startTime) / 1000)
|
||||
report.testsuites['@tests'] = testsuiteList.reduce((acc, val) => {
|
||||
acc += val['@tests']
|
||||
return acc
|
||||
}, 0)
|
||||
report.testsuites['@failures'] = testsuiteList.reduce((acc, val) => {
|
||||
acc += val['@failures']
|
||||
return acc
|
||||
}, 0)
|
||||
report.testsuites['@skipped'] = testsuiteList.reduce((acc, val) => {
|
||||
acc += val['@skipped']
|
||||
return acc
|
||||
}, 0)
|
||||
if (testsuiteList.length) {
|
||||
report.testsuites.testsuite = testsuiteList
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function createTestSuite (testsuiteList) {
|
||||
return function testsuite (name) {
|
||||
assert(name, 'The testsuite name is required')
|
||||
const startTime = Date.now()
|
||||
const suite = {
|
||||
'@id': new Date().toISOString(),
|
||||
'@name': name
|
||||
}
|
||||
const testcaseList = []
|
||||
testsuiteList.push(suite)
|
||||
return {
|
||||
testcase: createTestCase(testcaseList),
|
||||
end () {
|
||||
suite['@time'] = Math.round((Date.now() - startTime) / 1000)
|
||||
suite['@tests'] = testcaseList.length
|
||||
suite['@failures'] = testcaseList.filter(t => t.failure).length
|
||||
suite['@skipped'] = testcaseList.filter(t => t.skipped).length
|
||||
if (testcaseList.length) {
|
||||
suite.testcase = testcaseList
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function createTestCase (testcaseList) {
|
||||
return function testcase (name, file) {
|
||||
assert(name, 'The testcase name is required')
|
||||
const startTime = Date.now()
|
||||
const tcase = {
|
||||
'@id': new Date().toISOString(),
|
||||
'@name': name
|
||||
}
|
||||
if (file) tcase['@file'] = file
|
||||
testcaseList.push(tcase)
|
||||
return {
|
||||
failure (error) {
|
||||
assert(error, 'The failure error object is required')
|
||||
tcase.failure = {
|
||||
'#': error.stack,
|
||||
'@message': error.message,
|
||||
'@type': error.code
|
||||
}
|
||||
},
|
||||
skip (reason) {
|
||||
if (typeof reason !== 'string') {
|
||||
reason = JSON.stringify(reason, null, 2)
|
||||
}
|
||||
tcase.skipped = {
|
||||
'#': reason
|
||||
}
|
||||
},
|
||||
end () {
|
||||
tcase['@time'] = Math.round((Date.now() - startTime) / 1000)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = createJunitReporter
|
||||
482
test/integration/test-builder.js
Normal file
482
test/integration/test-builder.js
Normal file
@ -0,0 +1,482 @@
|
||||
/*
|
||||
* Copyright Elasticsearch B.V. and contributors
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { join, sep } = require('node:path')
|
||||
const { readFileSync, writeFileSync, promises } = require('node:fs')
|
||||
const yaml = require('js-yaml')
|
||||
const { rimraf } = require('rimraf')
|
||||
const { mkdir } = promises
|
||||
|
||||
const generatedTestsPath = join(__dirname, '..', '..', 'generated-tests')
|
||||
|
||||
const stackSkips = [
|
||||
// test definition bug: response is empty string
|
||||
'cat/fielddata.yml',
|
||||
// test definition bug: response is empty string
|
||||
'cluster/delete_voting_config_exclusions.yml',
|
||||
// test definition bug: response is empty string
|
||||
'cluster/voting_config_exclusions.yml',
|
||||
// client bug: ILM request takes a "body" param, but "body" is a special keyword in the JS client
|
||||
'ilm/10_basic.yml',
|
||||
// health report is... not healthy
|
||||
'health_report.yml',
|
||||
// TODO: `contains` action only supports checking for primitives inside arrays or strings inside strings, not referenced values like objects inside arrays
|
||||
'entsearch/10_basic.yml',
|
||||
// test definition bug: error message does not match
|
||||
'entsearch/30_sync_jobs_stack.yml',
|
||||
// no handler found for uri [/knn_test/_knn_search]
|
||||
'knn_search.yml',
|
||||
// TODO: fix license on ES startup - "Operation failed: Current license is basic."
|
||||
'license/10_stack.yml',
|
||||
// response.body should be truthy. found: ""
|
||||
'logstash/10_basic.yml',
|
||||
// test definition bug? security_exception: unable to authenticate user [x_pack_rest_user] for REST request [/_ml/trained_models/test_model/definition/0]
|
||||
'machine_learning/clear_tm_deployment_cache.yml',
|
||||
// client bug: 0.99995 does not equal 0.5
|
||||
'machine_learning/data_frame_evaluate.yml',
|
||||
// test definition bug? regex has whitespace, maybe needs to be removed
|
||||
'machine_learning/explain_data_frame_analytics.yml',
|
||||
// client bug: 4 != 227
|
||||
'machine_learning/preview_datafeed.yml',
|
||||
// test definition bug: error message does not match
|
||||
'machine_learning/revert_model_snapshot.yml',
|
||||
// test definition bug: error message does not match
|
||||
'machine_learning/update_model_snapshot.yml',
|
||||
// version_conflict_engine_exception
|
||||
'machine_learning/jobs_crud.yml',
|
||||
// test definition bug: error message does not match
|
||||
'machine_learning/model_snapshots.yml',
|
||||
// test definition bug: error message does not match
|
||||
'query_rules/30_test.yml',
|
||||
// client bug: 0 != 0.1
|
||||
'script/10_basic.yml',
|
||||
// client bug: request takes a "body" param, but "body" is a special keyword in the JS client
|
||||
'searchable_snapshots/10_basic.yml',
|
||||
// test builder bug: does `match` action need to support "array contains value"?
|
||||
'security/10_api_key_basic.yml',
|
||||
// test definition bug: error message does not match
|
||||
'security/140_user.yml',
|
||||
// test definition bug: error message does not match
|
||||
'security/30_privileges_stack.yml',
|
||||
// unknown issue: $profile.enabled path doesn't exist in response
|
||||
'security/130_user_profile.yml',
|
||||
// test definition bug: error message does not match
|
||||
'security/change_password.yml',
|
||||
// test builder bug: media_type_header_exception
|
||||
'simulate/ingest.yml',
|
||||
// client bug: request takes a "body" param, but "body" is a special keyword in the JS client
|
||||
'snapshot/10_basic.yml',
|
||||
// test definition bug: illegal_argument_exception
|
||||
'sql/10_basic.yml',
|
||||
// test definition bug: illegal_argument_exception
|
||||
'text_structure/10_basic.yml',
|
||||
// test definition bug: illegal_argument_exception
|
||||
'transform/10_basic.yml',
|
||||
]
|
||||
|
||||
const serverlessSkips = [
|
||||
// TODO: sql.getAsync does not set a content-type header but ES expects one
|
||||
// transport only sets a content-type if the body is not empty
|
||||
'sql/10_basic.yml',
|
||||
// TODO: bulk call in setup fails due to "malformed action/metadata line"
|
||||
// bulk body is being sent as a Buffer, unsure if related.
|
||||
'transform/10_basic.yml',
|
||||
// TODO: scripts_painless_execute expects {"result":"0.1"}, gets {"result":"0"}
|
||||
// body sent as Buffer, unsure if related
|
||||
'script/10_basic.yml',
|
||||
// TODO: expects {"outlier_detection.auc_roc.value":0.99995}, gets {"outlier_detection.auc_roc.value":0.5}
|
||||
// remove if/when https://github.com/elastic/elasticsearch-clients-tests/issues/37 is resolved
|
||||
'machine_learning/data_frame_evaluate.yml',
|
||||
// TODO: Cannot perform requested action because job [job-crud-test-apis] is not open
|
||||
'machine_learning/jobs_crud.yml',
|
||||
// TODO: test runner needs to support ignoring 410 errors
|
||||
'enrich/10_basic.yml',
|
||||
// TODO: parameter `enabled` is not allowed in source
|
||||
// Same underlying problem as https://github.com/elastic/elasticsearch-clients-tests/issues/55
|
||||
'cluster/component_templates.yml',
|
||||
// TODO: expecting `ct_field` field mapping to be returned, but instead only finds `field`
|
||||
'indices/simulate_template.yml',
|
||||
'indices/simulate_index_template.yml',
|
||||
// TODO: test currently times out
|
||||
'inference/10_basic.yml',
|
||||
// TODO: Fix: "Trained model deployment [test_model] is not allocated to any nodes"
|
||||
'machine_learning/20_trained_model_serverless.yml',
|
||||
// TODO: query_rules api not available yet
|
||||
'query_rules/10_query_rules.yml',
|
||||
'query_rules/20_rulesets.yml',
|
||||
'query_rules/30_test.yml',
|
||||
// TODO: security.putRole API not available
|
||||
'security/50_roles_serverless.yml',
|
||||
// TODO: expected undefined to equal 'some_table'
|
||||
'entsearch/50_connector_updates.yml',
|
||||
// TODO: resource_not_found_exception
|
||||
'tasks_serverless.yml',
|
||||
]
|
||||
|
||||
function parse (data) {
|
||||
let doc
|
||||
try {
|
||||
doc = yaml.load(data, { schema: yaml.CORE_SCHEMA })
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
async function build (yamlFiles, clientOptions) {
|
||||
await rimraf(generatedTestsPath)
|
||||
await mkdir(generatedTestsPath, { recursive: true })
|
||||
|
||||
for (const file of yamlFiles) {
|
||||
const apiName = file.split(`${sep}tests${sep}`)[1]
|
||||
const data = readFileSync(file, 'utf8')
|
||||
|
||||
const tests = data
|
||||
.split('\n---\n')
|
||||
.map(s => s.trim())
|
||||
// empty strings
|
||||
.filter(Boolean)
|
||||
.map(parse)
|
||||
// null values
|
||||
.filter(Boolean)
|
||||
|
||||
let code = "import { test } from 'tap'\n"
|
||||
code += "import { Client } from '@elastic/elasticsearch'\n\n"
|
||||
|
||||
const requires = tests.find(test => test.requires != null)
|
||||
let skip = new Set()
|
||||
if (requires != null) {
|
||||
const { serverless = true, stack = true } = requires.requires
|
||||
if (!serverless) skip.add('process.env.TEST_ES_SERVERLESS === "1"')
|
||||
if (!stack) skip.add('process.env.TEST_ES_STACK === "1"')
|
||||
}
|
||||
|
||||
if (stackSkips.includes(apiName)) skip.add('process.env.TEST_ES_STACK === "1"')
|
||||
if (serverlessSkips.includes(apiName)) skip.add('process.env.TEST_ES_SERVERLESS === "1"')
|
||||
|
||||
if (skip.size > 0) {
|
||||
code += `test('${apiName}', { skip: ${Array.from(skip).join(' || ')} }, t => {\n`
|
||||
} else {
|
||||
code += `test('${apiName}', t => {\n`
|
||||
}
|
||||
|
||||
for (const test of tests) {
|
||||
if (test.setup != null) {
|
||||
code += ' t.before(async () => {\n'
|
||||
code += indent(buildActions(test.setup), 4)
|
||||
code += ' })\n\n'
|
||||
}
|
||||
|
||||
if (test.teardown != null) {
|
||||
code += ' t.after(async () => {\n'
|
||||
code += indent(buildActions(test.teardown), 4)
|
||||
code += ' })\n\n'
|
||||
}
|
||||
|
||||
for (const key of Object.keys(test).filter(k => !['setup', 'teardown', 'requires'].includes(k))) {
|
||||
if (test[key].find(action => Object.keys(action)[0] === 'skip') != null) {
|
||||
code += ` t.test('${key}', { skip: true }, async t => {\n`
|
||||
} else {
|
||||
code += ` t.test('${key}', async t => {\n`
|
||||
}
|
||||
code += indent(buildActions(test[key]), 4)
|
||||
code += '\n t.end()\n'
|
||||
code += ' })\n'
|
||||
}
|
||||
// if (test.requires != null) requires = test.requires
|
||||
}
|
||||
|
||||
code += '\n t.end()\n'
|
||||
code += '})\n'
|
||||
|
||||
const testDir = join(generatedTestsPath, apiName.split(sep).slice(0, -1).join(sep))
|
||||
const testFile = join(testDir, apiName.split(sep).pop().replace(/\.ya?ml$/, '.mjs'))
|
||||
await mkdir(testDir, { recursive: true })
|
||||
writeFileSync(testFile, code, 'utf8')
|
||||
}
|
||||
|
||||
function buildActions (actions) {
|
||||
let code = `const client = new Client(${JSON.stringify(clientOptions, null, 2)})\n`
|
||||
code += 'let response\n\n'
|
||||
|
||||
const vars = new Set()
|
||||
|
||||
for (const action of actions) {
|
||||
const key = Object.keys(action)[0]
|
||||
switch (key) {
|
||||
case 'do':
|
||||
code += buildDo(action.do)
|
||||
break
|
||||
case 'set':
|
||||
const setResult = buildSet(action.set, vars)
|
||||
vars.add(setResult.varName)
|
||||
code += setResult.code
|
||||
break
|
||||
case 'transform_and_set':
|
||||
code += buildTransformAndSet(action.transform_and_set)
|
||||
break
|
||||
case 'match':
|
||||
code += buildMatch(action.match)
|
||||
break
|
||||
case 'lt':
|
||||
code += buildLt(action.lt)
|
||||
break
|
||||
case 'lte':
|
||||
code += buildLte(action.lte)
|
||||
break
|
||||
case 'gt':
|
||||
code += buildGt(action.gt)
|
||||
break
|
||||
case 'gte':
|
||||
code += buildGte(action.gte)
|
||||
break
|
||||
case 'length':
|
||||
code += buildLength(action.length)
|
||||
break
|
||||
case 'is_true':
|
||||
code += buildIsTrue(action.is_true)
|
||||
break
|
||||
case 'is_false':
|
||||
code += buildIsFalse(action.is_false)
|
||||
break
|
||||
case 'contains':
|
||||
code += buildContains(action.contains)
|
||||
break
|
||||
case 'exists':
|
||||
code += buildExists(action.exists)
|
||||
break
|
||||
case 'skip':
|
||||
break
|
||||
default:
|
||||
console.warn(`Action not supported: ${key}`)
|
||||
break
|
||||
}
|
||||
}
|
||||
return code
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Generate code for a `do` action: one or more client API calls, optionally
 * wrapped in try/catch when the action declares an expected `catch` value.
 * @param {object} action - The `do` action body from the YAML test
 * @returns {string} Generated JavaScript source
 */
function buildDo (action) {
  // no expected error: emit the bare request
  if (!('catch' in action)) {
    return buildRequest(action)
  }

  // expected error: wrap the request and assert on the thrown message
  let code = 'try {\n'
  code += indent(buildRequest(action), 2)
  code += '} catch (err) {\n'
  code += `  t.match(err.toString(), ${buildValLiteral(action.catch)})\n`
  code += '}\n'
  return code
}
|
||||
|
||||
/**
 * Generate the client call(s) for a `do` action. Handles per-request
 * `headers` and `ignore` transport options and always requests the full
 * response meta object (`meta: true`) so generated tests can inspect it.
 * @param {object} action - The `do` action body (API name -> params)
 * @returns {string} Generated JavaScript source
 */
function buildRequest (action) {
  const options = { meta: true }
  let code = ''

  for (const [key, params] of Object.entries(action)) {
    // `catch` is consumed by buildDo, not part of the request itself
    if (key === 'catch') continue

    // `headers` is a transport option, not an API method
    if (key === 'headers') {
      options.headers = params
      continue
    }

    // normalize `ignore` (tolerated HTTP error codes) to an array
    if (params.ignore != null) {
      options.ignore = Array.isArray(params.ignore) ? params.ignore : [params.ignore]
    }

    code += `response = await client.${toCamelCase(key)}(${buildApiParams(params)}, ${JSON.stringify(options)})\n`
  }
  return code
}
|
||||
|
||||
/**
 * Generate code for a `set` action, which captures a value from the last
 * response into a variable for later actions to reference.
 * Declares the variable with `let` on first use, reassigns afterwards.
 * @param {object} action - Single-key object: response path -> variable name
 * @param {Set<string>} vars - Variable names already declared in this test
 * @returns {{ code: string, varName: string }} Generated line and the name set
 */
function buildSet (action, vars) {
  const [path] = Object.keys(action)
  const varName = action[path]
  const value = buildLookup(path)
  const declaration = vars.has(varName) ? '' : 'let '
  return { code: `${declaration}${varName} = ${value}\n`, varName }
}
|
||||
|
||||
/**
 * Placeholder for `transform_and_set` actions, which are not supported yet.
 * Emits a TODO comment so the generated test still records the skipped step.
 * @param {object} action - The raw `transform_and_set` action body
 * @returns {string} A single-line TODO comment
 */
function buildTransformAndSet (action) {
  const serialized = JSON.stringify(action)
  return '// TODO buildTransformAndSet: ' + serialized + '\n'
}
|
||||
|
||||
/**
 * Generate a `t.match` assertion comparing a response value against the
 * expected value (literal, variable reference, or regular expression).
 * @param {object} action - Single-key object: response path -> expected value
 * @returns {string} Generated assertion line
 */
function buildMatch (action) {
  const [path] = Object.keys(action)
  return `t.match(${buildLookup(path)}, ${buildValLiteral(action[path])})\n`
}
|
||||
|
||||
/**
 * Generate a strictly-less-than assertion for a numeric response value.
 * @param {object} action - Single-key object: response path -> threshold
 * @returns {string} Generated assertion line
 */
function buildLt (action) {
  const [path] = Object.keys(action)
  return `t.ok(${buildLookup(path)} < ${buildValLiteral(action[path])})\n`
}
|
||||
|
||||
/**
 * Generate a less-than-or-equal assertion for a numeric response value.
 * @param {object} action - Single-key object: response path -> threshold
 * @returns {string} Generated assertion line
 */
function buildLte (action) {
  const [path] = Object.keys(action)
  return `t.ok(${buildLookup(path)} <= ${buildValLiteral(action[path])})\n`
}
|
||||
|
||||
/**
 * Generate a strictly-greater-than assertion for a numeric response value.
 * @param {object} action - Single-key object: response path -> threshold
 * @returns {string} Generated assertion line
 */
function buildGt (action) {
  const [path] = Object.keys(action)
  return `t.ok(${buildLookup(path)} > ${buildValLiteral(action[path])})\n`
}
|
||||
|
||||
/**
 * Generate a greater-than-or-equal assertion for a numeric response value.
 * @param {object} action - Single-key object: response path -> threshold
 * @returns {string} Generated assertion line
 */
function buildGte (action) {
  const [path] = Object.keys(action)
  return `t.ok(${buildLookup(path)} >= ${buildValLiteral(action[path])})\n`
}
|
||||
|
||||
/**
 * Generate a length assertion. Objects are measured by key count, arrays
 * and strings by their `length` property; the branch is decided at test
 * run time because the response shape is not known at generation time.
 * @param {object} action - Single-key object: response path -> expected length
 * @returns {string} Generated assertion block
 */
function buildLength (action) {
  const [path] = Object.keys(action)
  const lookup = buildLookup(path)
  const val = buildValLiteral(action[path])

  return [
    `if (typeof ${lookup} === 'object' && !Array.isArray(${lookup})) {`,
    `  t.equal(Object.keys(${lookup}).length, ${val})`,
    '} else {',
    `  t.equal(${lookup}.length, ${val})`,
    '}'
  ].join('\n') + '\n'
}
|
||||
|
||||
// Generate a truthiness assertion for an `is_true` action.
// The emitted check treats the string "true" as true and the string "false"
// as not-true, because YAML suite values may arrive as strings rather than
// booleans.
function buildIsTrue (action) {
  let lookup = `${buildLookup(action)}`
  // The failure message interpolates the runtime value: `\$\{...\}` keeps the
  // placeholder literal here so it expands inside the generated test instead
  // of at generation time.
  let errMessage = `\`${action} should be truthy. found: '\$\{JSON.stringify(${lookup})\}'\``
  // The `$body` lookup expression already calls JSON.stringify; avoid
  // stringifying twice in that case.
  if (lookup.includes('JSON.stringify')) errMessage = `\`${action} should be truthy. found: '\$\{${lookup}\}'\``
  return `t.ok(${lookup} === "true" || (Boolean(${lookup}) && ${lookup} !== "false"), ${errMessage})\n`
}
|
||||
|
||||
// Generate a falsiness assertion for an `is_false` action.
// Mirrors buildIsTrue: the string "false" counts as false, and any other
// falsy runtime value passes via !Boolean(...).
function buildIsFalse (action) {
  let lookup = `${buildLookup(action)}`
  // `\$\{...\}` keeps the placeholder literal so it expands inside the
  // generated test rather than at generation time.
  let errMessage = `\`${action} should be falsy. found: '\$\{JSON.stringify(${lookup})\}'\``
  // The `$body` lookup expression already calls JSON.stringify; avoid
  // stringifying twice in that case.
  if (lookup.includes('JSON.stringify')) errMessage = `\`${action} should be falsy. found: '\$\{${lookup}\}'\``
  return `t.ok(${lookup} === "false" || !Boolean(${lookup}), ${errMessage})\n`
}
|
||||
|
||||
/**
 * Generate a `contains` assertion: the response value (array or string)
 * must include the expected value.
 * @param {object} action - Single-key object: response path -> expected member
 * @returns {string} Generated assertion line
 */
function buildContains (action) {
  const key = Object.keys(action)[0]
  const lookup = buildLookup(key)
  const val = buildValLiteral(action[key])
  // JSON.stringify the whole failure message so quotes inside `val` or `key`
  // cannot break the generated source. The previous single-quote wrapping
  // ('${JSON.stringify(val)} not found in ${key}') produced invalid
  // JavaScript whenever the value contained a single quote.
  const errMessage = JSON.stringify(`${JSON.stringify(val)} not found in ${key}`)
  return `t.ok(${lookup}.includes(${val}), ${errMessage})\n`
}
|
||||
|
||||
/**
 * Generate an assertion that a key exists (is non-null) in the response body.
 * The failure message dumps the full body to ease debugging.
 * @param {string} keyName - Dot-notation path into the response body
 * @returns {string} Generated assertion line
 */
function buildExists (keyName) {
  const lookup = buildLookup(keyName)
  const message = `\`Key "${keyName}" not found in response body: \$\{JSON.stringify(response.body, null, 2)\}\``
  return `t.ok(${lookup} != null, ${message})\n`
}
|
||||
|
||||
/**
 * Build the first-argument literal for a generated client call, dropping
 * the `ignore` and `headers` keys (those become transport options).
 * @param {object} params - Raw parameters from the YAML action
 * @returns {string} Source text for the call's params argument, or the
 *   string 'undefined' when the action carried no parameters at all
 */
function buildApiParams (params) {
  if (Object.keys(params).length === 0) return 'undefined'
  const entries = Object.entries(params).filter(([k]) => k !== 'ignore' && k !== 'headers')
  return buildValLiteral(Object.fromEntries(entries))
}
|
||||
|
||||
/**
 * Convert a snake_case API name to the camelCase client method name,
 * e.g. `indices.get_mapping` -> `indices.getMapping`.
 * @param {string} name - Snake_case API name from the YAML test
 * @returns {string} camelCase method name
 */
function toCamelCase (name) {
  return name.replace(/_([a-z])/g, (_match, letter) => letter.toUpperCase())
}
|
||||
|
||||
/**
 * Indent every line of `str` by `spaces` spaces, trimming trailing
 * whitespace first and ensuring the result ends with a single newline.
 * @param {string} str - Source text to indent
 * @param {number} spaces - Number of leading spaces to add per line
 * @returns {string} Indented text ending in a newline
 */
function indent (str, spaces) {
  const pad = ' '.repeat(spaces)
  const lines = str.replace(/\s+$/, '').split('\n')
  return lines.map(line => pad + line).join('\n') + '\n'
}
|
||||
|
||||
/**
 * Translate a dot-notation YAML lookup path into a JavaScript expression
 * over `response.body`. Numeric steps become array indices, `$name` steps
 * reference previously-`set` variables, and the special path `$body`
 * yields the full body (stringified unless it is already a string).
 * @param {string} path - Dot-notation path, e.g. `hits.hits.0._id`
 * @returns {string} Source expression for the generated test
 */
function buildLookup (path) {
  if (path === '$body') return '(typeof response.body === "string" ? response.body : JSON.stringify(response.body))'

  let expr = 'response.body'
  for (const step of path.split('.')) {
    if (step === '') continue
    if (String(parseInt(step, 10)) === step) {
      // pure integer step: index into an array
      expr += `[${step}]`
    } else if (/^\$[a-zA-Z0-9_]+$/.test(step)) {
      // $variable step: reference a variable captured by an earlier `set`
      const name = step.slice(1)
      if (name !== 'body') expr += `[${name}]`
    } else {
      // plain key step: quoted property access
      expr += `['${step}']`
    }
  }
  return expr
}
|
||||
|
||||
// Render an expected value from the YAML test as a JavaScript source literal.
// Handles, in order: /regex/ strings (emitted unquoted as regex literals),
// `$variable` references (emitted as bare identifiers), plain objects
// (pretty-printed JSON with embedded "$var" strings unquoted), and
// everything else via JSON.stringify.
function buildValLiteral (val) {
  if (typeof val === 'string') val = val.trim()
  if (isRegExp(val)) {
    // strip the surrounding JSON quotes and collapse the double backslashes
    // JSON.stringify added, leaving a bare /.../ literal in the output
    return JSON.stringify(val).replace(/^"/, '').replace(/"$/, '').replaceAll('\\\\', '\\')
  } else if (isVariable(val)) {
    if (val === '$body') return 'JSON.stringify(response.body)'
    return val.replace(/^\$/, '')
  } else if (isPlainObject(val)) {
    // unquote "$var" values inside the object so they reference variables
    // captured by earlier `set` actions
    return JSON.stringify(cleanObject(val), null, 2).replace(/"\$([a-zA-Z0-9_]+)"/g, '$1')
  } else {
    return JSON.stringify(val)
  }
}
|
||||
|
||||
/**
 * Whether a YAML value is a regex literal: a string delimited by slashes.
 * @param {*} str - Candidate value
 * @returns {boolean}
 */
function isRegExp (str) {
  if (typeof str !== 'string') return false
  return str.startsWith('/') && str.endsWith('/')
}
|
||||
|
||||
/**
 * Whether a YAML value is a `$variable` reference captured by an earlier
 * `set` action (a `$` followed only by word characters).
 * @param {*} str - Candidate value
 * @returns {boolean}
 */
function isVariable (str) {
  return typeof str === 'string' && /^\$[a-zA-Z0-9_]+$/.test(str)
}
|
||||
|
||||
/**
 * Recursively normalize an expected-value object before serialization:
 * string values that look like embedded JSON objects are parsed (best
 * effort), and nested plain objects and arrays are cleaned in turn.
 *
 * Unlike the previous implementation this does not mutate its argument,
 * so the parsed YAML action object stays pristine for later use.
 * @param {object} obj - Expected-value object from the YAML test
 * @returns {object} A cleaned copy of `obj`
 */
function cleanObject (obj) {
  const cleaned = {}
  for (const [key, val] of Object.entries(obj)) {
    cleaned[key] = cleanValue(val)
  }
  return cleaned
}

// Clean a single value: parse JSON-looking strings, recurse into plain
// objects and arrays, pass everything else through unchanged.
function cleanValue (val) {
  if (typeof val === 'string' && val.trim().startsWith('{') && val.trim().endsWith('}')) {
    // attempt to parse as object
    try {
      return JSON.parse(val)
    } catch {
      // not actually JSON: keep the raw string (deliberate best-effort)
      return val
    }
  }
  if (isPlainObject(val)) return cleanObject(val)
  if (Array.isArray(val)) return val.map(item => isPlainObject(item) ? cleanObject(item) : item)
  return val
}
|
||||
|
||||
/**
 * Whether a value is a non-null, non-array object (a plain record).
 * @param {*} obj - Candidate value
 * @returns {boolean}
 */
function isPlainObject (obj) {
  if (obj == null) return false
  return typeof obj === 'object' && !Array.isArray(obj)
}
|
||||
|
||||
// NOTE(review): `build` is defined earlier in this file, outside this chunk —
// presumably the test-builder entry point; confirm against the full file.
module.exports = build
|
||||
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user