Elasticsearch v8 (#1558)
This commit is contained in:
committed by
GitHub
parent
4c72b981cd
commit
1a227459f0
@ -42,6 +42,13 @@ const MAX_FILE_TIME = 1000 * 30
|
||||
const MAX_TEST_TIME = 1000 * 3
|
||||
|
||||
const freeSkips = {
|
||||
// the v8 client never sends the scroll_id in querystgring,
|
||||
// the way the test is structured causes a security exception
|
||||
'free/scroll/10_basic.yml': ['Body params override query string'],
|
||||
'free/scroll/11_clear.yml': [
|
||||
'Body params with array param override query string',
|
||||
'Body params with string param scroll id override query string'
|
||||
],
|
||||
// TODO: remove this once 'arbitrary_key' is implemented
|
||||
// https://github.com/elastic/elasticsearch/pull/41492
|
||||
'indices.split/30_copy_settings.yml': ['*'],
|
||||
@ -124,19 +131,31 @@ const platinumBlackList = {
|
||||
'unsigned_long/30_multi_fields.yml': ['*'],
|
||||
'unsigned_long/40_different_numeric.yml': ['*'],
|
||||
'unsigned_long/50_script_values.yml': ['*'],
|
||||
// the v8 client flattens the body into the parent object
|
||||
'platinum/users/10_basic.yml': ['Test put user with different username in body'],
|
||||
// docker issue?
|
||||
'watcher/execute_watch/60_http_input.yml': ['*'],
|
||||
// the checks are correct, but for some reason the test is failing on js side
|
||||
// I bet is because the backslashes in the rg
|
||||
'watcher/execute_watch/70_invalid.yml': ['*'],
|
||||
'watcher/put_watch/10_basic.yml': ['*'],
|
||||
'xpack/15_basic.yml': ['*']
|
||||
'xpack/15_basic.yml': ['*'],
|
||||
|
||||
// test that are failing that needs to be investigated
|
||||
// the error cause can either be in the yaml test or in the specification
|
||||
|
||||
// start should be a string in the yaml test
|
||||
'platinum/ml/delete_job_force.yml': ['Test force delete an open job that is referred by a started datafeed'],
|
||||
'platinum/ml/evaluate_data_frame.yml': ['*'],
|
||||
'platinum/ml/get_datafeed_stats.yml': ['*'],
|
||||
// start should be a string in the yaml test
|
||||
'platinum/ml/start_stop_datafeed.yml': ['*']
|
||||
}
|
||||
|
||||
function runner (opts = {}) {
|
||||
const options = { node: opts.node }
|
||||
if (opts.isXPack) {
|
||||
options.ssl = {
|
||||
options.tls = {
|
||||
ca: readFileSync(join(__dirname, '..', '..', '.ci', 'certs', 'ca.crt'), 'utf8'),
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
@ -157,7 +176,7 @@ function runner (opts = {}) {
|
||||
|
||||
async function waitCluster (client, times = 0) {
|
||||
try {
|
||||
await client.cluster.health({ waitForStatus: 'green', timeout: '50s' })
|
||||
await client.cluster.health({ wait_for_status: 'green', timeout: '50s' })
|
||||
} catch (err) {
|
||||
if (++times < 10) {
|
||||
await sleep(5000)
|
||||
@ -172,7 +191,7 @@ async function start ({ client, isXPack }) {
|
||||
log('Waiting for Elasticsearch')
|
||||
await waitCluster(client)
|
||||
|
||||
const { body } = await client.info()
|
||||
const body = await client.info()
|
||||
const { number: version, build_hash: hash } = body.version
|
||||
|
||||
log(`Downloading artifacts for hash ${hash}...`)
|
||||
@ -337,7 +356,7 @@ function generateJunitXmlReport (junit, suite) {
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
const node = process.env.TEST_ES_SERVER || 'https://elastic:changeme@localhost:9200'
|
||||
const node = process.env.TEST_ES_SERVER || 'http://elastic:changeme@localhost:9200'
|
||||
const opts = {
|
||||
node,
|
||||
isXPack: process.env.TEST_SUITE !== 'free'
|
||||
|
||||
52
test/integration/integration/README.md
Normal file
52
test/integration/integration/README.md
Normal file
@ -0,0 +1,52 @@
|
||||
# `elasticsearch-js` integration test suite
|
||||
|
||||
> What? A README to explain how the integration test work??
|
||||
|
||||
Yes.
|
||||
|
||||
## Background
|
||||
Elasticsearch offers its entire API via HTTP REST endpoints. You can find the whole API specification for every version [here](https://github.com/elastic/elasticsearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/api).<br/>
|
||||
To support different languages at the same time, the Elasticsearch team decided to provide a [YAML specification](https://github.com/elastic/elasticsearch/tree/master/rest-api-spec/src/main/resources/rest-api-spec/test) to test every endpoint, body, headers, warning, error and so on.<br/>
|
||||
This testing suite uses that specification to generate the test for the specified version of Elasticsearch on the fly.
|
||||
|
||||
## Run
|
||||
Run the testing suite is very easy, you just need to run the preconfigured npm script:
|
||||
```sh
|
||||
npm run test:integration
|
||||
```
|
||||
|
||||
The first time you run this command, the Elasticsearch repository will be cloned inside the integration test folder, to be able to access the YAML specification, so it might take some time *(luckily, only the first time)*.<br/>
|
||||
Once the Elasticsearch repository has been cloned, the testing suite will connect to the provided Elasticsearch instance and then checkout the build hash in the repository. Finally, it will start running every test.
|
||||
|
||||
The specification does not allow the test to be run in parallel, so it might take a while to run the entire testing suite; on my machine, `MacBookPro15,2 core i7 2.7GHz 16GB of RAM` it takes around four minutes.
|
||||
|
||||
### Exit on the first failure
|
||||
Bu default the suite will run all the test, even if one assertion has failed. If you want to stop the test at the first failure, use the bailout option:
|
||||
```sh
|
||||
npm run test:integration -- --bail
|
||||
```
|
||||
|
||||
### Calculate the code coverage
|
||||
If you want to calculate the code coverage just run the testing suite with the following parameters, once the test ends, it will open a browser window with the results.
|
||||
```sh
|
||||
npm run test:integration -- --cov --coverage-report=html
|
||||
```
|
||||
|
||||
## How does this thing work?
|
||||
At first sight, it might seem complicated, but once you understand what the moving parts are, it's quite easy.
|
||||
1. Connects to the given Elasticsearch instance
|
||||
1. Gets the ES version and build hash
|
||||
1. Checkout to the given hash (and clone the repository if it is not present)
|
||||
1. Reads the folder list and for each folder the yaml file list
|
||||
1. Starts running folder by folder every file
|
||||
1. Read and parse the yaml files
|
||||
1. Creates a subtest structure to have a cleaner output
|
||||
1. Runs the assertions
|
||||
1. Repeat!
|
||||
|
||||
Inside the `index.js` file, you will find the connection, cloning, reading and parsing part of the test, while inside the `test-runner.js` file you will find the function to handle the assertions. Inside `test-runner.js`, we use a [queue](https://github.com/delvedor/workq) to be sure that everything is run in the correct order.
|
||||
|
||||
Checkout the [rest-api-spec readme](https://github.com/elastic/elasticsearch/blob/master/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc) if you want to know more about how the assertions work.
|
||||
|
||||
#### Why are we running the test with the `--harmony` flag?
|
||||
Because on Node v6 the regex lookbehinds are not supported.
|
||||
96
test/integration/integration/helper.js
Normal file
96
test/integration/integration/helper.js
Normal file
@ -0,0 +1,96 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
function runInParallel (client, operation, options, clientOptions) {
|
||||
if (options.length === 0) return Promise.resolve()
|
||||
const operations = options.map(opts => {
|
||||
const api = delve(client, operation).bind(client)
|
||||
return api(opts, clientOptions)
|
||||
})
|
||||
|
||||
return Promise.all(operations)
|
||||
}
|
||||
|
||||
// code from https://github.com/developit/dlv
|
||||
// needed to support an edge case: `a\.b`
|
||||
// where `a.b` is a single field: { 'a.b': true }
|
||||
function delve (obj, key, def, p) {
|
||||
p = 0
|
||||
// handle the key with a dot inside that is not a part of the path
|
||||
// and removes the backslashes from the key
|
||||
key = key.split
|
||||
? key.split(/(?<!\\)\./g).map(k => k.replace(/\\/g, ''))
|
||||
: key.replace(/\\/g, '')
|
||||
while (obj && p < key.length) obj = obj[key[p++]]
|
||||
return (obj === undefined || p < key.length) ? def : obj
|
||||
}
|
||||
|
||||
function to (promise) {
|
||||
return promise.then(data => [null, data], err => [err, undefined])
|
||||
}
|
||||
|
||||
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
|
||||
|
||||
function isXPackTemplate (name) {
|
||||
if (name.startsWith('.monitoring-')) {
|
||||
return true
|
||||
}
|
||||
if (name.startsWith('.watch') || name.startsWith('.triggered_watches')) {
|
||||
return true
|
||||
}
|
||||
if (name.startsWith('.data-frame-')) {
|
||||
return true
|
||||
}
|
||||
if (name.startsWith('.ml-')) {
|
||||
return true
|
||||
}
|
||||
if (name.startsWith('.transform-')) {
|
||||
return true
|
||||
}
|
||||
switch (name) {
|
||||
case '.watches':
|
||||
case 'logstash-index-template':
|
||||
case '.logstash-management':
|
||||
case 'security_audit_log':
|
||||
case '.slm-history':
|
||||
case '.async-search':
|
||||
case 'saml-service-provider':
|
||||
case 'ilm-history':
|
||||
case 'logs':
|
||||
case 'logs-settings':
|
||||
case 'logs-mappings':
|
||||
case 'metrics':
|
||||
case 'metrics-settings':
|
||||
case 'metrics-mappings':
|
||||
case 'synthetics':
|
||||
case 'synthetics-settings':
|
||||
case 'synthetics-mappings':
|
||||
case '.snapshot-blob-cache':
|
||||
case '.deprecation-indexing-template':
|
||||
case '.deprecation-indexing-mappings':
|
||||
case '.deprecation-indexing-settings':
|
||||
case 'data-streams-mappings':
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
module.exports = { runInParallel, delve, to, sleep, isXPackTemplate }
|
||||
204
test/integration/integration/helpers/bulk.test.js
Normal file
204
test/integration/integration/helpers/bulk.test.js
Normal file
@ -0,0 +1,204 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const { test, beforeEach, afterEach } = require('tap')
|
||||
const { waitCluster } = require('../../utils')
|
||||
const { Client } = require('../../../')
|
||||
|
||||
const datasetPath = join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson')
|
||||
const INDEX = `test-helpers-${process.pid}`
|
||||
const client = new Client({
|
||||
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await waitCluster(client)
|
||||
await client.indices.create({ index: INDEX })
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.indices.delete({ index: INDEX }, { ignore: 404 })
|
||||
})
|
||||
|
||||
test('bulk index', async t => {
|
||||
const stream = createReadStream(datasetPath)
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split()),
|
||||
refreshOnCompletion: INDEX,
|
||||
onDrop (doc) {
|
||||
t.fail('It should not drop any document')
|
||||
},
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: INDEX }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 5000,
|
||||
successful: 5000,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
|
||||
const { body } = await client.count({ index: INDEX })
|
||||
t.match(body, { count: 5000 })
|
||||
})
|
||||
|
||||
test('bulk index with custom id', async t => {
|
||||
const stream = createReadStream(datasetPath)
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split(JSON.parse)),
|
||||
onDrop (doc) {
|
||||
t.fail('It should not drop any document')
|
||||
},
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: {
|
||||
_index: INDEX,
|
||||
_id: doc.id
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 5000,
|
||||
successful: 5000,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
|
||||
const { body } = await client.get({
|
||||
index: INDEX,
|
||||
id: '19273860' // id of document n° 4242
|
||||
})
|
||||
|
||||
t.equal(body._index, INDEX)
|
||||
t.equal(body._id, '19273860')
|
||||
t.equal(body._source.id, '19273860')
|
||||
})
|
||||
|
||||
test('abort the operation on document drop', async t => {
|
||||
const stream = createReadStream(datasetPath)
|
||||
const b = client.helpers.bulk({
|
||||
datasource: stream.pipe(split(JSON.parse)),
|
||||
concurrency: 1,
|
||||
onDrop (doc) {
|
||||
t.equal(doc.status, 400)
|
||||
t.equal(doc.error.type, 'mapper_parsing_exception')
|
||||
t.equal(doc.document.id, '45924372')
|
||||
b.abort()
|
||||
},
|
||||
onDocument (doc) {
|
||||
if (doc.id === '45924372') { // id of document n° 500
|
||||
// this will break the mapping
|
||||
doc.title = { foo: 'bar' }
|
||||
}
|
||||
return {
|
||||
index: {
|
||||
_index: INDEX,
|
||||
_id: doc.id
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const result = await b
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.equal(result.total - 1, result.successful)
|
||||
t.match(result, {
|
||||
retry: 0,
|
||||
failed: 1,
|
||||
aborted: true
|
||||
})
|
||||
})
|
||||
|
||||
test('bulk delete', async t => {
|
||||
const indexResult = await client.helpers.bulk({
|
||||
datasource: createReadStream(datasetPath).pipe(split(JSON.parse)),
|
||||
refreshOnCompletion: true,
|
||||
onDrop (doc) {
|
||||
t.fail('It should not drop any document')
|
||||
},
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: {
|
||||
_index: INDEX,
|
||||
_id: doc.id
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(indexResult.time, 'number')
|
||||
t.type(indexResult.bytes, 'number')
|
||||
t.match(indexResult, {
|
||||
total: 5000,
|
||||
successful: 5000,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
|
||||
const { body: afterIndex } = await client.count({ index: INDEX })
|
||||
t.match(afterIndex, { count: 5000 })
|
||||
|
||||
const deleteResult = await client.helpers.bulk({
|
||||
datasource: createReadStream(datasetPath).pipe(split(JSON.parse)),
|
||||
refreshOnCompletion: true,
|
||||
onDrop (doc) {
|
||||
t.fail('It should not drop any document')
|
||||
},
|
||||
onDocument (doc) {
|
||||
return {
|
||||
delete: {
|
||||
_index: INDEX,
|
||||
_id: doc.id
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(deleteResult.time, 'number')
|
||||
t.type(deleteResult.bytes, 'number')
|
||||
t.match(deleteResult, {
|
||||
total: 5000,
|
||||
successful: 5000,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
|
||||
const { body: afterDelete } = await client.count({ index: INDEX })
|
||||
t.match(afterDelete, { count: 0 })
|
||||
})
|
||||
121
test/integration/integration/helpers/msearch.test.js
Normal file
121
test/integration/integration/helpers/msearch.test.js
Normal file
@ -0,0 +1,121 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const { test, beforeEach, afterEach } = require('tap')
|
||||
const { waitCluster } = require('../../utils')
|
||||
const { Client, errors } = require('../../../')
|
||||
|
||||
const INDEX = `test-helpers-${process.pid}`
|
||||
const client = new Client({
|
||||
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await waitCluster(client)
|
||||
await client.indices.create({ index: INDEX })
|
||||
const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split()),
|
||||
refreshOnCompletion: true,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: INDEX }
|
||||
}
|
||||
}
|
||||
})
|
||||
if (result.failed > 0) {
|
||||
throw new Error('Failed bulk indexing docs')
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.indices.delete({ index: INDEX }, { ignore: 404 })
|
||||
})
|
||||
|
||||
test('Basic', t => {
|
||||
t.plan(4)
|
||||
const m = client.helpers.msearch({ operations: 1 })
|
||||
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { match: { title: 'javascript' } } },
|
||||
(err, result) => {
|
||||
t.error(err)
|
||||
t.equal(result.body.hits.total.value, 106)
|
||||
}
|
||||
)
|
||||
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { match: { title: 'ruby' } } },
|
||||
(err, result) => {
|
||||
t.error(err)
|
||||
t.equal(result.body.hits.total.value, 29)
|
||||
}
|
||||
)
|
||||
|
||||
t.teardown(() => m.stop())
|
||||
})
|
||||
|
||||
test('Bad request', t => {
|
||||
t.plan(3)
|
||||
const m = client.helpers.msearch({ operations: 1 })
|
||||
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { match: { title: 'javascript' } } },
|
||||
(err, result) => {
|
||||
t.error(err)
|
||||
t.equal(result.body.hits.total.value, 106)
|
||||
}
|
||||
)
|
||||
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { foo: { title: 'ruby' } } },
|
||||
(err, result) => {
|
||||
t.ok(err instanceof errors.ResponseError)
|
||||
}
|
||||
)
|
||||
|
||||
t.teardown(() => m.stop())
|
||||
})
|
||||
|
||||
test('Send multiple request concurrently over the concurrency limit', t => {
|
||||
t.plan(20)
|
||||
const m = client.helpers.msearch({ operations: 1 })
|
||||
|
||||
for (let i = 0; i < 10; i++) {
|
||||
m.search(
|
||||
{ index: INDEX },
|
||||
{ query: { match: { title: 'javascript' } } },
|
||||
(err, result) => {
|
||||
t.error(err)
|
||||
t.equal(result.body.hits.total.value, 106)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
t.teardown(() => m.stop())
|
||||
})
|
||||
118
test/integration/integration/helpers/scroll.test.js
Normal file
118
test/integration/integration/helpers/scroll.test.js
Normal file
@ -0,0 +1,118 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const { test, beforeEach, afterEach } = require('tap')
|
||||
const { waitCluster } = require('../../utils')
|
||||
const { Client } = require('../../../')
|
||||
|
||||
const INDEX = `test-helpers-${process.pid}`
|
||||
const client = new Client({
|
||||
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await waitCluster(client)
|
||||
await client.indices.create({ index: INDEX })
|
||||
const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split()),
|
||||
refreshOnCompletion: true,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: INDEX }
|
||||
}
|
||||
}
|
||||
})
|
||||
if (result.failed > 0) {
|
||||
throw new Error('Failed bulk indexing docs')
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.indices.delete({ index: INDEX }, { ignore: 404 })
|
||||
})
|
||||
|
||||
test('search helper', async t => {
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: INDEX,
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
let count = 0
|
||||
for await (const search of scrollSearch) {
|
||||
count += 1
|
||||
for (const doc of search.documents) {
|
||||
t.ok(doc.title.toLowerCase().includes('javascript'))
|
||||
}
|
||||
}
|
||||
t.equal(count, 11)
|
||||
})
|
||||
|
||||
test('clear a scroll search', async t => {
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: INDEX,
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
let count = 0
|
||||
for await (const search of scrollSearch) {
|
||||
count += 1
|
||||
if (count === 2) {
|
||||
search.clear()
|
||||
}
|
||||
}
|
||||
t.equal(count, 2)
|
||||
})
|
||||
|
||||
test('scroll documents', async t => {
|
||||
const scrollSearch = client.helpers.scrollDocuments({
|
||||
index: INDEX,
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
let count = 0
|
||||
for await (const doc of scrollSearch) {
|
||||
count += 1
|
||||
t.ok(doc.title.toLowerCase().includes('javascript'))
|
||||
}
|
||||
t.equal(count, 106)
|
||||
})
|
||||
71
test/integration/integration/helpers/search.test.js
Normal file
71
test/integration/integration/helpers/search.test.js
Normal file
@ -0,0 +1,71 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const { test, beforeEach, afterEach } = require('tap')
|
||||
const { waitCluster } = require('../../utils')
|
||||
const { Client } = require('../../../')
|
||||
|
||||
const INDEX = `test-helpers-${process.pid}`
|
||||
const client = new Client({
|
||||
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await waitCluster(client)
|
||||
await client.indices.create({ index: INDEX })
|
||||
const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split()),
|
||||
refreshOnCompletion: true,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: INDEX }
|
||||
}
|
||||
}
|
||||
})
|
||||
if (result.failed > 0) {
|
||||
throw new Error('Failed bulk indexing docs')
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.indices.delete({ index: INDEX }, { ignore: 404 })
|
||||
})
|
||||
|
||||
test('search helper', async t => {
|
||||
const results = await client.helpers.search({
|
||||
index: INDEX,
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
t.equal(results.length, 10)
|
||||
for (const result of results) {
|
||||
t.ok(result.title.toLowerCase().includes('javascript'))
|
||||
}
|
||||
})
|
||||
385
test/integration/integration/index.js
Normal file
385
test/integration/integration/index.js
Normal file
@ -0,0 +1,385 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
process.on('unhandledRejection', function (err) {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
})
|
||||
|
||||
const { writeFileSync, readFileSync, readdirSync, statSync } = require('fs')
|
||||
const { join, sep } = require('path')
|
||||
const yaml = require('js-yaml')
|
||||
const ms = require('ms')
|
||||
const { Client } = require('../../index')
|
||||
const build = require('./test-runner')
|
||||
const { sleep } = require('./helper')
|
||||
const createJunitReporter = require('./reporter')
|
||||
const downloadArtifacts = require('../../scripts/download-artifacts')
|
||||
|
||||
const yamlFolder = downloadArtifacts.locations.freeTestFolder
|
||||
const xPackYamlFolder = downloadArtifacts.locations.xPackTestFolder
|
||||
|
||||
const MAX_API_TIME = 1000 * 90
|
||||
const MAX_FILE_TIME = 1000 * 30
|
||||
const MAX_TEST_TIME = 1000 * 3
|
||||
|
||||
const freeSkips = {
|
||||
// TODO: remove this once 'arbitrary_key' is implemented
|
||||
// https://github.com/elastic/elasticsearch/pull/41492
|
||||
'indices.split/30_copy_settings.yml': ['*'],
|
||||
'indices.stats/50_disk_usage.yml': ['Disk usage stats'],
|
||||
'indices.stats/60_field_usage.yml': ['Field usage stats'],
|
||||
// skipping because we are booting ES with `discovery.type=single-node`
|
||||
// and this test will fail because of this configuration
|
||||
'nodes.stats/30_discovery.yml': ['*'],
|
||||
// the expected error is returning a 503,
|
||||
// which triggers a retry and the node to be marked as dead
|
||||
'search.aggregation/240_max_buckets.yml': ['*'],
|
||||
// the yaml runner assumes that null means "does not exists",
|
||||
// while null is a valid json value, so the check will fail
|
||||
'search/320_disallow_queries.yml': ['Test disallow expensive queries']
|
||||
}
|
||||
const platinumBlackList = {
|
||||
'analytics/histogram.yml': ['Histogram requires values in increasing order'],
|
||||
// this two test cases are broken, we should
|
||||
// return on those in the future.
|
||||
'analytics/top_metrics.yml': [
|
||||
'sort by keyword field fails',
|
||||
'sort by string script fails'
|
||||
],
|
||||
'cat.aliases/10_basic.yml': ['Empty cluster'],
|
||||
'index/10_with_id.yml': ['Index with ID'],
|
||||
'indices.get_alias/10_basic.yml': ['Get alias against closed indices'],
|
||||
'indices.get_alias/20_empty.yml': ['Check empty aliases when getting all aliases via /_alias'],
|
||||
'text_structure/find_structure.yml': ['*'],
|
||||
// https://github.com/elastic/elasticsearch/pull/39400
|
||||
'ml/jobs_crud.yml': ['Test put job with id that is already taken'],
|
||||
// object keys must me strings, and `0.0.toString()` is `0`
|
||||
'ml/evaluate_data_frame.yml': [
|
||||
'Test binary_soft_classifition precision',
|
||||
'Test binary_soft_classifition recall',
|
||||
'Test binary_soft_classifition confusion_matrix'
|
||||
],
|
||||
// it gets random failures on CI, must investigate
|
||||
'ml/set_upgrade_mode.yml': [
|
||||
'Attempt to open job when upgrade_mode is enabled',
|
||||
'Setting upgrade mode to disabled from enabled'
|
||||
],
|
||||
// The cleanup fails with a index not found when retrieving the jobs
|
||||
'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'],
|
||||
'ml/bucket_correlation_agg.yml': ['Test correlation bucket agg simple'],
|
||||
'ml/preview_datafeed.yml': ['*'],
|
||||
// Investigate why is failing
|
||||
'ml/inference_crud.yml': ['*'],
|
||||
// investigate why this is failing
|
||||
'monitoring/bulk/10_basic.yml': ['*'],
|
||||
'monitoring/bulk/20_privileges.yml': ['*'],
|
||||
'license/20_put_license.yml': ['*'],
|
||||
'snapshot/10_basic.yml': ['*'],
|
||||
'snapshot/20_operator_privileges_disabled.yml': ['*'],
|
||||
// the body is correct, but the regex is failing
|
||||
'sql/sql.yml': ['Getting textual representation'],
|
||||
'searchable_snapshots/10_usage.yml': ['*'],
|
||||
'service_accounts/10_basic.yml': ['*'],
|
||||
// we are setting two certificates in the docker config
|
||||
'ssl/10_basic.yml': ['*'],
|
||||
// very likely, the index template has not been loaded yet.
|
||||
// we should run a indices.existsTemplate, but the name of the
|
||||
// template may vary during time.
|
||||
'transforms_crud.yml': [
|
||||
'Test basic transform crud',
|
||||
'Test transform with query and array of indices in source',
|
||||
'Test PUT continuous transform',
|
||||
'Test PUT continuous transform without delay set'
|
||||
],
|
||||
'transforms_force_delete.yml': [
|
||||
'Test force deleting a running transform'
|
||||
],
|
||||
'transforms_cat_apis.yml': ['*'],
|
||||
'transforms_start_stop.yml': ['*'],
|
||||
'transforms_stats.yml': ['*'],
|
||||
'transforms_stats_continuous.yml': ['*'],
|
||||
'transforms_update.yml': ['*'],
|
||||
// js does not support ulongs
|
||||
'unsigned_long/10_basic.yml': ['*'],
|
||||
'unsigned_long/20_null_value.yml': ['*'],
|
||||
'unsigned_long/30_multi_fields.yml': ['*'],
|
||||
'unsigned_long/40_different_numeric.yml': ['*'],
|
||||
'unsigned_long/50_script_values.yml': ['*'],
|
||||
// docker issue?
|
||||
'watcher/execute_watch/60_http_input.yml': ['*'],
|
||||
// the checks are correct, but for some reason the test is failing on js side
|
||||
// I bet is because the backslashes in the rg
|
||||
'watcher/execute_watch/70_invalid.yml': ['*'],
|
||||
'watcher/put_watch/10_basic.yml': ['*'],
|
||||
'xpack/15_basic.yml': ['*']
|
||||
}
|
||||
|
||||
function runner (opts = {}) {
|
||||
const options = { node: opts.node }
|
||||
if (opts.isXPack) {
|
||||
options.ssl = {
|
||||
ca: readFileSync(join(__dirname, '..', '..', '.ci', 'certs', 'ca.crt'), 'utf8'),
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
}
|
||||
const client = new Client(options)
|
||||
log('Loading yaml suite')
|
||||
start({ client, isXPack: opts.isXPack })
|
||||
.catch(err => {
|
||||
if (err.name === 'ResponseError') {
|
||||
console.error(err)
|
||||
console.log(JSON.stringify(err.meta, null, 2))
|
||||
} else {
|
||||
console.error(err)
|
||||
}
|
||||
process.exit(1)
|
||||
})
|
||||
}
|
||||
|
||||
async function waitCluster (client, times = 0) {
|
||||
try {
|
||||
await client.cluster.health({ waitForStatus: 'green', timeout: '50s' })
|
||||
} catch (err) {
|
||||
if (++times < 10) {
|
||||
await sleep(5000)
|
||||
return waitCluster(client, times)
|
||||
}
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
async function start ({ client, isXPack }) {
|
||||
log('Waiting for Elasticsearch')
|
||||
await waitCluster(client)
|
||||
|
||||
const { body } = await client.info()
|
||||
const { number: version, build_hash: hash } = body.version
|
||||
|
||||
log(`Downloading artifacts for hash ${hash}...`)
|
||||
await downloadArtifacts({ hash, version })
|
||||
|
||||
log(`Testing ${isXPack ? 'Platinum' : 'Free'} api...`)
|
||||
const junit = createJunitReporter()
|
||||
const junitTestSuites = junit.testsuites(`Integration test for ${isXPack ? 'Platinum' : 'Free'} api`)
|
||||
|
||||
const stats = {
|
||||
total: 0,
|
||||
skip: 0,
|
||||
pass: 0,
|
||||
assertions: 0
|
||||
}
|
||||
const folders = getAllFiles(isXPack ? xPackYamlFolder : yamlFolder)
|
||||
.filter(t => !/(README|TODO)/g.test(t))
|
||||
// we cluster the array based on the folder names,
|
||||
// to provide a better test log output
|
||||
.reduce((arr, file) => {
|
||||
const path = file.slice(file.indexOf('/rest-api-spec/test'), file.lastIndexOf('/'))
|
||||
let inserted = false
|
||||
for (let i = 0; i < arr.length; i++) {
|
||||
if (arr[i][0].includes(path)) {
|
||||
inserted = true
|
||||
arr[i].push(file)
|
||||
break
|
||||
}
|
||||
}
|
||||
if (!inserted) arr.push([file])
|
||||
return arr
|
||||
}, [])
|
||||
|
||||
const totalTime = now()
|
||||
for (const folder of folders) {
|
||||
// pretty name
|
||||
const apiName = folder[0].slice(
|
||||
folder[0].indexOf(`${sep}rest-api-spec${sep}test`) + 19,
|
||||
folder[0].lastIndexOf(sep)
|
||||
)
|
||||
|
||||
log('Testing ' + apiName.slice(1))
|
||||
const apiTime = now()
|
||||
|
||||
for (const file of folder) {
|
||||
const testRunner = build({
|
||||
client,
|
||||
version,
|
||||
isXPack: file.includes('platinum')
|
||||
})
|
||||
const fileTime = now()
|
||||
const data = readFileSync(file, 'utf8')
|
||||
// get the test yaml (as object), some file has multiple yaml documents inside,
|
||||
// every document is separated by '---', so we split on the separator
|
||||
// and then we remove the empty strings, finally we parse them
|
||||
const tests = data
|
||||
.split('\n---\n')
|
||||
.map(s => s.trim())
|
||||
// empty strings
|
||||
.filter(Boolean)
|
||||
.map(parse)
|
||||
// null values
|
||||
.filter(Boolean)
|
||||
|
||||
// get setup and teardown if present
|
||||
let setupTest = null
|
||||
let teardownTest = null
|
||||
for (const test of tests) {
|
||||
if (test.setup) setupTest = test.setup
|
||||
if (test.teardown) teardownTest = test.teardown
|
||||
}
|
||||
|
||||
const cleanPath = file.slice(file.lastIndexOf(apiName))
|
||||
log(' ' + cleanPath)
|
||||
const junitTestSuite = junitTestSuites.testsuite(apiName.slice(1) + ' - ' + cleanPath)
|
||||
|
||||
for (const test of tests) {
|
||||
const testTime = now()
|
||||
const name = Object.keys(test)[0]
|
||||
if (name === 'setup' || name === 'teardown') continue
|
||||
const junitTestCase = junitTestSuite.testcase(name)
|
||||
|
||||
stats.total += 1
|
||||
if (shouldSkip(isXPack, file, name)) {
|
||||
stats.skip += 1
|
||||
junitTestCase.skip('This test is in the skip list of the client')
|
||||
junitTestCase.end()
|
||||
continue
|
||||
}
|
||||
log(' - ' + name)
|
||||
try {
|
||||
await testRunner.run(setupTest, test[name], teardownTest, stats, junitTestCase)
|
||||
stats.pass += 1
|
||||
} catch (err) {
|
||||
junitTestCase.failure(err)
|
||||
junitTestCase.end()
|
||||
junitTestSuite.end()
|
||||
junitTestSuites.end()
|
||||
generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
}
|
||||
const totalTestTime = now() - testTime
|
||||
junitTestCase.end()
|
||||
if (totalTestTime > MAX_TEST_TIME) {
|
||||
log(' took too long: ' + ms(totalTestTime))
|
||||
} else {
|
||||
log(' took: ' + ms(totalTestTime))
|
||||
}
|
||||
}
|
||||
junitTestSuite.end()
|
||||
const totalFileTime = now() - fileTime
|
||||
if (totalFileTime > MAX_FILE_TIME) {
|
||||
log(` ${cleanPath} took too long: ` + ms(totalFileTime))
|
||||
} else {
|
||||
log(` ${cleanPath} took: ` + ms(totalFileTime))
|
||||
}
|
||||
}
|
||||
const totalApiTime = now() - apiTime
|
||||
if (totalApiTime > MAX_API_TIME) {
|
||||
log(`${apiName} took too long: ` + ms(totalApiTime))
|
||||
} else {
|
||||
log(`${apiName} took: ` + ms(totalApiTime))
|
||||
}
|
||||
}
|
||||
junitTestSuites.end()
|
||||
generateJunitXmlReport(junit, isXPack ? 'platinum' : 'free')
|
||||
log(`Total testing time: ${ms(now() - totalTime)}`)
|
||||
log(`Test stats:
|
||||
- Total: ${stats.total}
|
||||
- Skip: ${stats.skip}
|
||||
- Pass: ${stats.pass}
|
||||
- Assertions: ${stats.assertions}
|
||||
`)
|
||||
}
|
||||
|
||||
function log (text) {
|
||||
process.stdout.write(text + '\n')
|
||||
}
|
||||
|
||||
function now () {
|
||||
const ts = process.hrtime()
|
||||
return (ts[0] * 1e3) + (ts[1] / 1e6)
|
||||
}
|
||||
|
||||
function parse (data) {
|
||||
let doc
|
||||
try {
|
||||
doc = yaml.load(data, { schema: yaml.CORE_SCHEMA })
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
function generateJunitXmlReport (junit, suite) {
|
||||
writeFileSync(
|
||||
join(__dirname, '..', '..', `${suite}-report-junit.xml`),
|
||||
junit.prettyPrint()
|
||||
)
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
const node = process.env.TEST_ES_SERVER || 'https://elastic:changeme@localhost:9200'
|
||||
const opts = {
|
||||
node,
|
||||
isXPack: process.env.TEST_SUITE !== 'free'
|
||||
}
|
||||
runner(opts)
|
||||
}
|
||||
|
||||
const shouldSkip = (isXPack, file, name) => {
|
||||
let list = Object.keys(freeSkips)
|
||||
for (let i = 0; i < list.length; i++) {
|
||||
const freeTest = freeSkips[list[i]]
|
||||
for (let j = 0; j < freeTest.length; j++) {
|
||||
if (file.endsWith(list[i]) && (name === freeTest[j] || freeTest[j] === '*')) {
|
||||
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
|
||||
log(`Skipping test ${testName} because is blacklisted in the free test`)
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (file.includes('x-pack') || isXPack) {
|
||||
list = Object.keys(platinumBlackList)
|
||||
for (let i = 0; i < list.length; i++) {
|
||||
const platTest = platinumBlackList[list[i]]
|
||||
for (let j = 0; j < platTest.length; j++) {
|
||||
if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) {
|
||||
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
|
||||
log(`Skipping test ${testName} because is blacklisted in the platinum test`)
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
const getAllFiles = dir =>
|
||||
readdirSync(dir).reduce((files, file) => {
|
||||
const name = join(dir, file)
|
||||
const isDirectory = statSync(name).isDirectory()
|
||||
return isDirectory ? [...files, ...getAllFiles(name)] : [...files, name]
|
||||
}, [])
|
||||
|
||||
module.exports = runner
|
||||
109
test/integration/integration/reporter.js
Normal file
109
test/integration/integration/reporter.js
Normal file
@ -0,0 +1,109 @@
|
||||
'use strict'
|
||||
|
||||
const assert = require('assert')
|
||||
const { create } = require('xmlbuilder2')
|
||||
|
||||
function createJunitReporter () {
|
||||
const report = {}
|
||||
|
||||
return { testsuites, prettyPrint }
|
||||
|
||||
function prettyPrint () {
|
||||
return create(report).end({ prettyPrint: true })
|
||||
}
|
||||
|
||||
function testsuites (name) {
|
||||
assert(name, 'The testsuites name is required')
|
||||
assert(report.testsuites === undefined, 'Cannot set more than one testsuites block')
|
||||
const startTime = Date.now()
|
||||
|
||||
report.testsuites = {
|
||||
'@id': new Date().toISOString(),
|
||||
'@name': name
|
||||
}
|
||||
|
||||
const testsuiteList = []
|
||||
|
||||
return {
|
||||
testsuite: createTestSuite(testsuiteList),
|
||||
end () {
|
||||
report.testsuites['@time'] = Math.round((Date.now() - startTime) / 1000)
|
||||
report.testsuites['@tests'] = testsuiteList.reduce((acc, val) => {
|
||||
acc += val['@tests']
|
||||
return acc
|
||||
}, 0)
|
||||
report.testsuites['@failures'] = testsuiteList.reduce((acc, val) => {
|
||||
acc += val['@failures']
|
||||
return acc
|
||||
}, 0)
|
||||
report.testsuites['@skipped'] = testsuiteList.reduce((acc, val) => {
|
||||
acc += val['@skipped']
|
||||
return acc
|
||||
}, 0)
|
||||
if (testsuiteList.length) {
|
||||
report.testsuites.testsuite = testsuiteList
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function createTestSuite (testsuiteList) {
|
||||
return function testsuite (name) {
|
||||
assert(name, 'The testsuite name is required')
|
||||
const startTime = Date.now()
|
||||
const suite = {
|
||||
'@id': new Date().toISOString(),
|
||||
'@name': name
|
||||
}
|
||||
const testcaseList = []
|
||||
testsuiteList.push(suite)
|
||||
return {
|
||||
testcase: createTestCase(testcaseList),
|
||||
end () {
|
||||
suite['@time'] = Math.round((Date.now() - startTime) / 1000)
|
||||
suite['@tests'] = testcaseList.length
|
||||
suite['@failures'] = testcaseList.filter(t => t.failure).length
|
||||
suite['@skipped'] = testcaseList.filter(t => t.skipped).length
|
||||
if (testcaseList.length) {
|
||||
suite.testcase = testcaseList
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function createTestCase (testcaseList) {
|
||||
return function testcase (name) {
|
||||
assert(name, 'The testcase name is required')
|
||||
const startTime = Date.now()
|
||||
const tcase = {
|
||||
'@id': new Date().toISOString(),
|
||||
'@name': name
|
||||
}
|
||||
testcaseList.push(tcase)
|
||||
return {
|
||||
failure (error) {
|
||||
assert(error, 'The failure error object is required')
|
||||
tcase.failure = {
|
||||
'#': error.stack,
|
||||
'@message': error.message,
|
||||
'@type': error.code
|
||||
}
|
||||
},
|
||||
skip (reason) {
|
||||
if (typeof reason !== 'string') {
|
||||
reason = JSON.stringify(reason, null, 2)
|
||||
}
|
||||
tcase.skipped = {
|
||||
'#': reason
|
||||
}
|
||||
},
|
||||
end () {
|
||||
tcase['@time'] = Math.round((Date.now() - startTime) / 1000)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = createJunitReporter
|
||||
909
test/integration/integration/test-runner.js
Normal file
909
test/integration/integration/test-runner.js
Normal file
@ -0,0 +1,909 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
|
||||
const assert = require('assert')
|
||||
const semver = require('semver')
|
||||
const helper = require('./helper')
|
||||
const deepEqual = require('fast-deep-equal')
|
||||
const { join } = require('path')
|
||||
const { locations } = require('../../scripts/download-artifacts')
|
||||
const { ConfigurationError } = require('../../lib/errors')
|
||||
|
||||
const { delve, to, isXPackTemplate, sleep } = helper
|
||||
|
||||
const supportedFeatures = [
|
||||
'gtelte',
|
||||
'regex',
|
||||
'benchmark',
|
||||
'stash_in_path',
|
||||
'groovy_scripting',
|
||||
'headers',
|
||||
'transform_and_set',
|
||||
'catch_unauthorized',
|
||||
'arbitrary_key'
|
||||
]
|
||||
|
||||
function build (opts = {}) {
|
||||
const client = opts.client
|
||||
const esVersion = opts.version
|
||||
const isXPack = opts.isXPack
|
||||
const stash = new Map()
|
||||
let response = null
|
||||
|
||||
/**
|
||||
* Runs a cleanup, removes all indices, aliases, templates, and snapshots
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async function cleanup (isXPack) {
|
||||
response = null
|
||||
stash.clear()
|
||||
|
||||
if (isXPack) {
|
||||
// wipe rollup jobs
|
||||
const { body: jobsList } = await client.rollup.getJobs({ id: '_all' })
|
||||
const jobsIds = jobsList.jobs.map(j => j.config.id)
|
||||
await helper.runInParallel(
|
||||
client, 'rollup.stopJob',
|
||||
jobsIds.map(j => ({ id: j, waitForCompletion: true }))
|
||||
)
|
||||
await helper.runInParallel(
|
||||
client, 'rollup.deleteJob',
|
||||
jobsIds.map(j => ({ id: j }))
|
||||
)
|
||||
|
||||
// delete slm policies
|
||||
const { body: policies } = await client.slm.getLifecycle()
|
||||
await helper.runInParallel(
|
||||
client, 'slm.deleteLifecycle',
|
||||
Object.keys(policies).map(p => ({ policy_id: p }))
|
||||
)
|
||||
|
||||
// remove 'x_pack_rest_user', used in some xpack test
|
||||
await client.security.deleteUser({ username: 'x_pack_rest_user' }, { ignore: [404] })
|
||||
|
||||
const { body: searchableSnapshotIndices } = await client.cluster.state({
|
||||
metric: 'metadata',
|
||||
filter_path: 'metadata.indices.*.settings.index.store.snapshot'
|
||||
})
|
||||
if (searchableSnapshotIndices.metadata != null && searchableSnapshotIndices.metadata.indices != null) {
|
||||
await helper.runInParallel(
|
||||
client, 'indices.delete',
|
||||
Object.keys(searchableSnapshotIndices.metadata.indices).map(i => ({ index: i })),
|
||||
{ ignore: [404] }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// clean snapshots
|
||||
const { body: repositories } = await client.snapshot.getRepository()
|
||||
for (const repository of Object.keys(repositories)) {
|
||||
await client.snapshot.delete({ repository, snapshot: '*' }, { ignore: [404] })
|
||||
await client.snapshot.deleteRepository({ repository }, { ignore: [404] })
|
||||
}
|
||||
|
||||
if (isXPack) {
|
||||
// clean data streams
|
||||
await client.indices.deleteDataStream({ name: '*' })
|
||||
}
|
||||
|
||||
// clean all indices
|
||||
await client.indices.delete({ index: '*,-.ds-ilm-history-*', expand_wildcards: 'open,closed,hidden' }, { ignore: [404] })
|
||||
|
||||
// delete templates
|
||||
const { body: templates } = await client.cat.templates({ h: 'name' })
|
||||
for (const template of templates.split('\n').filter(Boolean)) {
|
||||
if (isXPackTemplate(template)) continue
|
||||
const { body } = await client.indices.deleteTemplate({ name: template }, { ignore: [404] })
|
||||
if (JSON.stringify(body).includes(`index_template [${template}] missing`)) {
|
||||
await client.indices.deleteIndexTemplate({ name: template }, { ignore: [404] })
|
||||
}
|
||||
}
|
||||
|
||||
// delete component template
|
||||
const { body } = await client.cluster.getComponentTemplate()
|
||||
const components = body.component_templates.filter(c => !isXPackTemplate(c.name)).map(c => c.name)
|
||||
if (components.length > 0) {
|
||||
await client.cluster.deleteComponentTemplate({ name: components.join(',') }, { ignore: [404] })
|
||||
}
|
||||
|
||||
// Remove any cluster setting
|
||||
const { body: settings } = await client.cluster.getSettings()
|
||||
const newSettings = {}
|
||||
for (const setting in settings) {
|
||||
if (Object.keys(settings[setting]).length === 0) continue
|
||||
newSettings[setting] = {}
|
||||
for (const key in settings[setting]) {
|
||||
newSettings[setting][`${key}.*`] = null
|
||||
}
|
||||
}
|
||||
if (Object.keys(newSettings).length > 0) {
|
||||
await client.cluster.putSettings({ body: newSettings })
|
||||
}
|
||||
|
||||
if (isXPack) {
|
||||
// delete ilm policies
|
||||
const preserveIlmPolicies = [
|
||||
'ilm-history-ilm-policy', 'slm-history-ilm-policy',
|
||||
'watch-history-ilm-policy', 'ml-size-based-ilm-policy',
|
||||
'logs', 'metrics'
|
||||
]
|
||||
const { body: policies } = await client.ilm.getLifecycle()
|
||||
for (const policy in policies) {
|
||||
if (preserveIlmPolicies.includes(policy)) continue
|
||||
await client.ilm.deleteLifecycle({ policy })
|
||||
}
|
||||
|
||||
// delete autofollow patterns
|
||||
const { body: patterns } = await client.ccr.getAutoFollowPattern()
|
||||
for (const { name } of patterns.patterns) {
|
||||
await client.ccr.deleteAutoFollowPattern({ name })
|
||||
}
|
||||
|
||||
// delete all tasks
|
||||
const { body: nodesTask } = await client.tasks.list()
|
||||
const tasks = Object.keys(nodesTask.nodes)
|
||||
.reduce((acc, node) => {
|
||||
const { tasks } = nodesTask.nodes[node]
|
||||
Object.keys(tasks).forEach(id => {
|
||||
if (tasks[id].cancellable) acc.push(id)
|
||||
})
|
||||
return acc
|
||||
}, [])
|
||||
|
||||
await helper.runInParallel(
|
||||
client, 'tasks.cancel',
|
||||
tasks.map(id => ({ taskId: id }))
|
||||
)
|
||||
}
|
||||
|
||||
const { body: shutdownNodes } = await client.shutdown.getNode()
|
||||
if (shutdownNodes._nodes == null && shutdownNodes.cluster_name == null) {
|
||||
for (const node of shutdownNodes.nodes) {
|
||||
await client.shutdown.deleteNode({ node_id: node.node_id })
|
||||
}
|
||||
}
|
||||
|
||||
// wait for pending task before resolving the promise
|
||||
await sleep(100)
|
||||
while (true) {
|
||||
const { body } = await client.cluster.pendingTasks()
|
||||
if (body.tasks.length === 0) break
|
||||
await sleep(500)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs the given test.
|
||||
* It runs the test components in the following order:
|
||||
* - skip check
|
||||
* - xpack user
|
||||
* - setup
|
||||
* - the actual test
|
||||
* - teardown
|
||||
* - xpack cleanup
|
||||
* - cleanup
|
||||
* @param {object} setup (null if not needed)
|
||||
* @param {object} test
|
||||
* @oaram {object} teardown (null if not needed)
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async function run (setup, test, teardown, stats, junit) {
|
||||
// if we should skip a feature in the setup/teardown section
|
||||
// we should skip the entire test file
|
||||
const skip = getSkip(setup) || getSkip(teardown)
|
||||
if (skip && shouldSkip(esVersion, skip)) {
|
||||
junit.skip(skip)
|
||||
logSkip(skip)
|
||||
return
|
||||
}
|
||||
|
||||
if (isXPack) {
|
||||
// Some xpack test requires this user
|
||||
// tap.comment('Creating x-pack user')
|
||||
try {
|
||||
await client.security.putUser({
|
||||
username: 'x_pack_rest_user',
|
||||
body: { password: 'x-pack-test-password', roles: ['superuser'] }
|
||||
})
|
||||
} catch (err) {
|
||||
assert.ifError(err, 'should not error: security.putUser')
|
||||
}
|
||||
}
|
||||
|
||||
if (setup) await exec('Setup', setup, stats, junit)
|
||||
|
||||
await exec('Test', test, stats, junit)
|
||||
|
||||
if (teardown) await exec('Teardown', teardown, stats, junit)
|
||||
|
||||
await cleanup(isXPack)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fill the stashed values of a command
|
||||
* let's say the we have stashed the `master` value,
|
||||
* is_true: nodes.$master.transport.profiles
|
||||
* becomes
|
||||
* is_true: nodes.new_value.transport.profiles
|
||||
* @param {object|string} the action to update
|
||||
* @returns {object|string} the updated action
|
||||
*/
|
||||
function fillStashedValues (obj) {
|
||||
if (typeof obj === 'string') {
|
||||
return getStashedValues(obj)
|
||||
}
|
||||
// iterate every key of the object
|
||||
for (const key in obj) {
|
||||
const val = obj[key]
|
||||
// if the key value is a string, and the string includes '${'
|
||||
// that we must update the content of '${...}'.
|
||||
// eg: 'Basic ${auth}' we search the stahed value 'auth'
|
||||
// and the resulting value will be 'Basic valueOfAuth'
|
||||
if (typeof val === 'string' && val.includes('${')) {
|
||||
while (obj[key].includes('${')) {
|
||||
const val = obj[key]
|
||||
const start = val.indexOf('${')
|
||||
const end = val.indexOf('}', val.indexOf('${'))
|
||||
const stashedKey = val.slice(start + 2, end)
|
||||
const stashed = stash.get(stashedKey)
|
||||
obj[key] = val.slice(0, start) + stashed + val.slice(end + 1)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// handle json strings, eg: '{"hello":"$world"}'
|
||||
if (typeof val === 'string' && val.includes('"$')) {
|
||||
while (obj[key].includes('"$')) {
|
||||
const val = obj[key]
|
||||
const start = val.indexOf('"$')
|
||||
const end = val.indexOf('"', start + 1)
|
||||
const stashedKey = val.slice(start + 2, end)
|
||||
const stashed = '"' + stash.get(stashedKey) + '"'
|
||||
obj[key] = val.slice(0, start) + stashed + val.slice(end + 1)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// if the key value is a string, and the string includes '$'
|
||||
// we run the "update value" code
|
||||
if (typeof val === 'string' && val.includes('$')) {
|
||||
// update the key value
|
||||
obj[key] = getStashedValues(val)
|
||||
continue
|
||||
}
|
||||
|
||||
// go deep in the object
|
||||
if (val !== null && typeof val === 'object') {
|
||||
fillStashedValues(val)
|
||||
}
|
||||
}
|
||||
|
||||
return obj
|
||||
|
||||
function getStashedValues (str) {
|
||||
const arr = str
|
||||
// we split the string on the dots
|
||||
// handle the key with a dot inside that is not a part of the path
|
||||
.split(/(?<!\\)\./g)
|
||||
// we update every field that start with '$'
|
||||
.map(part => {
|
||||
if (part[0] === '$') {
|
||||
const stashed = stash.get(part.slice(1))
|
||||
if (stashed == null) {
|
||||
throw new Error(`Cannot find stashed value '${part}' for '${JSON.stringify(obj)}'`)
|
||||
}
|
||||
return stashed
|
||||
}
|
||||
return part
|
||||
})
|
||||
|
||||
// recreate the string value only if the array length is higher than one
|
||||
// otherwise return the first element which in some test this could be a number,
|
||||
// and call `.join` will coerce it to a string.
|
||||
return arr.length > 1 ? arr.join('.') : arr[0]
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stashes a value
|
||||
* @param {string} the key to search in the previous response
|
||||
* @param {string} the name to identify the stashed value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function set (key, name) {
|
||||
if (key.includes('_arbitrary_key_')) {
|
||||
let currentVisit = null
|
||||
for (const path of key.split('.')) {
|
||||
if (path === '_arbitrary_key_') {
|
||||
const keys = Object.keys(currentVisit)
|
||||
const arbitraryKey = keys[getRandomInt(0, keys.length)]
|
||||
stash.set(name, arbitraryKey)
|
||||
} else {
|
||||
currentVisit = delve(response, path)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
stash.set(name, delve(response, key))
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies a given transformation and stashes the result.
|
||||
* @param {string} the name to identify the stashed value
|
||||
* @param {string} the transformation function as string
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function transform_and_set (name, transform) {
|
||||
if (/base64EncodeCredentials/.test(transform)) {
|
||||
const [user, password] = transform
|
||||
.slice(transform.indexOf('(') + 1, -1)
|
||||
.replace(/ /g, '')
|
||||
.split(',')
|
||||
const userAndPassword = `${delve(response, user)}:${delve(response, password)}`
|
||||
stash.set(name, Buffer.from(userAndPassword).toString('base64'))
|
||||
} else {
|
||||
throw new Error(`Unknown transform: '${transform}'`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs a client command
|
||||
* @param {object} the action to perform
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async function doAction (action, stats) {
|
||||
const cmd = parseDo(action)
|
||||
let api
|
||||
try {
|
||||
api = delve(client, cmd.method).bind(client)
|
||||
} catch (err) {
|
||||
console.error(`\nError: Cannot find the method '${cmd.method}' in the client.\n`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const options = { ignore: cmd.params.ignore, headers: action.headers }
|
||||
if (!Array.isArray(options.ignore)) options.ignore = [options.ignore]
|
||||
if (cmd.params.ignore) delete cmd.params.ignore
|
||||
|
||||
// ndjson apis should always send the body as an array
|
||||
if (isNDJson(cmd.api) && !Array.isArray(cmd.params.body)) {
|
||||
cmd.params.body = [cmd.params.body]
|
||||
}
|
||||
|
||||
const [err, result] = await to(api(cmd.params, options))
|
||||
let warnings = result ? result.warnings : null
|
||||
const body = result ? result.body : null
|
||||
|
||||
if (action.warnings && warnings === null) {
|
||||
assert.fail('We should get a warning header', action.warnings)
|
||||
} else if (!action.warnings && warnings !== null) {
|
||||
// if there is only the 'default shard will change'
|
||||
// warning we skip the check, because the yaml
|
||||
// spec may not be updated
|
||||
let hasDefaultShardsWarning = false
|
||||
warnings.forEach(h => {
|
||||
if (/default\snumber\sof\sshards/g.test(h)) {
|
||||
hasDefaultShardsWarning = true
|
||||
}
|
||||
})
|
||||
|
||||
if (hasDefaultShardsWarning === true && warnings.length > 1) {
|
||||
assert.fail('We are not expecting warnings', warnings)
|
||||
}
|
||||
} else if (action.warnings && warnings !== null) {
|
||||
// if the yaml warnings do not contain the
|
||||
// 'default shard will change' warning
|
||||
// we do not check it presence in the warnings array
|
||||
// because the yaml spec may not be updated
|
||||
let hasDefaultShardsWarning = false
|
||||
action.warnings.forEach(h => {
|
||||
if (/default\snumber\sof\sshards/g.test(h)) {
|
||||
hasDefaultShardsWarning = true
|
||||
}
|
||||
})
|
||||
|
||||
if (hasDefaultShardsWarning === false) {
|
||||
warnings = warnings.filter(h => !h.test(/default\snumber\sof\sshards/g))
|
||||
}
|
||||
|
||||
stats.assertions += 1
|
||||
assert.ok(deepEqual(warnings, action.warnings))
|
||||
}
|
||||
|
||||
if (action.catch) {
|
||||
stats.assertions += 1
|
||||
assert.ok(
|
||||
parseDoError(err, action.catch),
|
||||
`the error should be: ${action.catch}`
|
||||
)
|
||||
try {
|
||||
response = JSON.parse(err.body)
|
||||
} catch (e) {
|
||||
response = err.body
|
||||
}
|
||||
} else {
|
||||
stats.assertions += 1
|
||||
assert.ifError(err, `should not error: ${cmd.method}`, action)
|
||||
response = body
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs an actual test
|
||||
* @param {string} the name of the test
|
||||
* @param {object} the actions to perform
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async function exec (name, actions, stats, junit) {
|
||||
// tap.comment(name)
|
||||
for (const action of actions) {
|
||||
if (action.skip) {
|
||||
if (shouldSkip(esVersion, action.skip)) {
|
||||
junit.skip(fillStashedValues(action.skip))
|
||||
logSkip(fillStashedValues(action.skip))
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (action.do) {
|
||||
await doAction(fillStashedValues(action.do), stats)
|
||||
}
|
||||
|
||||
if (action.set) {
|
||||
const key = Object.keys(action.set)[0]
|
||||
set(fillStashedValues(key), action.set[key])
|
||||
}
|
||||
|
||||
if (action.transform_and_set) {
|
||||
const key = Object.keys(action.transform_and_set)[0]
|
||||
transform_and_set(key, action.transform_and_set[key])
|
||||
}
|
||||
|
||||
if (action.match) {
|
||||
stats.assertions += 1
|
||||
const key = Object.keys(action.match)[0]
|
||||
match(
|
||||
// in some cases, the yaml refers to the body with an empty string
|
||||
key === '$body' || key === ''
|
||||
? response
|
||||
: delve(response, fillStashedValues(key)),
|
||||
key === '$body'
|
||||
? action.match[key]
|
||||
: fillStashedValues(action.match)[key],
|
||||
action.match
|
||||
)
|
||||
}
|
||||
|
||||
if (action.lt) {
|
||||
stats.assertions += 1
|
||||
const key = Object.keys(action.lt)[0]
|
||||
lt(
|
||||
delve(response, fillStashedValues(key)),
|
||||
fillStashedValues(action.lt)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.gt) {
|
||||
stats.assertions += 1
|
||||
const key = Object.keys(action.gt)[0]
|
||||
gt(
|
||||
delve(response, fillStashedValues(key)),
|
||||
fillStashedValues(action.gt)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.lte) {
|
||||
stats.assertions += 1
|
||||
const key = Object.keys(action.lte)[0]
|
||||
lte(
|
||||
delve(response, fillStashedValues(key)),
|
||||
fillStashedValues(action.lte)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.gte) {
|
||||
stats.assertions += 1
|
||||
const key = Object.keys(action.gte)[0]
|
||||
gte(
|
||||
delve(response, fillStashedValues(key)),
|
||||
fillStashedValues(action.gte)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.length) {
|
||||
stats.assertions += 1
|
||||
const key = Object.keys(action.length)[0]
|
||||
length(
|
||||
key === '$body' || key === ''
|
||||
? response
|
||||
: delve(response, fillStashedValues(key)),
|
||||
key === '$body'
|
||||
? action.length[key]
|
||||
: fillStashedValues(action.length)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.is_true) {
|
||||
stats.assertions += 1
|
||||
const isTrue = fillStashedValues(action.is_true)
|
||||
is_true(
|
||||
delve(response, isTrue),
|
||||
isTrue
|
||||
)
|
||||
}
|
||||
|
||||
if (action.is_false) {
|
||||
stats.assertions += 1
|
||||
const isFalse = fillStashedValues(action.is_false)
|
||||
is_false(
|
||||
delve(response, isFalse),
|
||||
isFalse
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { run }
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the given value is truthy
|
||||
* @param {any} the value to check
|
||||
* @param {string} an optional message
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function is_true (val, msg) {
|
||||
assert.ok(val, `expect truthy value: ${msg} - value: ${JSON.stringify(val)}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the given value is falsey
|
||||
* @param {any} the value to check
|
||||
* @param {string} an optional message
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function is_false (val, msg) {
|
||||
assert.ok(!val, `expect falsey value: ${msg} - value: ${JSON.stringify(val)}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that two values are the same
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function match (val1, val2, action) {
|
||||
// both values are objects
|
||||
if (typeof val1 === 'object' && typeof val2 === 'object') {
|
||||
assert.ok(deepEqual(val1, val2), action)
|
||||
// the first value is the body as string and the second a pattern string
|
||||
} else if (
|
||||
typeof val1 === 'string' && typeof val2 === 'string' &&
|
||||
val2.startsWith('/') && (val2.endsWith('/\n') || val2.endsWith('/'))
|
||||
) {
|
||||
const regStr = val2
|
||||
// match all comments within a "regexp" match arg
|
||||
.replace(/([\S\s]?)#[^\n]*\n/g, (match, prevChar) => {
|
||||
return prevChar === '\\' ? match : `${prevChar}\n`
|
||||
})
|
||||
// remove all whitespace from the expression, all meaningful
|
||||
// whitespace is represented with \s
|
||||
.replace(/\s/g, '')
|
||||
.slice(1, -1)
|
||||
// 'm' adds the support for multiline regex
|
||||
assert.ok(new RegExp(regStr, 'm').test(val1), `should match pattern provided: ${val2}, action: ${JSON.stringify(action)}`)
|
||||
// tap.match(val1, new RegExp(regStr, 'm'), `should match pattern provided: ${val2}, action: ${JSON.stringify(action)}`)
|
||||
// everything else
|
||||
} else {
|
||||
assert.equal(val1, val2, `should be equal: ${val1} - ${val2}, action: ${JSON.stringify(action)}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the first value is less than the second
|
||||
* It also verifies that the two values are numbers
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function lt (val1, val2) {
|
||||
;[val1, val2] = getNumbers(val1, val2)
|
||||
assert.ok(val1 < val2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the first value is greater than the second
|
||||
* It also verifies that the two values are numbers
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function gt (val1, val2) {
|
||||
;[val1, val2] = getNumbers(val1, val2)
|
||||
assert.ok(val1 > val2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the first value is less than or equal the second
|
||||
* It also verifies that the two values are numbers
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function lte (val1, val2) {
|
||||
;[val1, val2] = getNumbers(val1, val2)
|
||||
assert.ok(val1 <= val2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the first value is greater than or equal the second
|
||||
* It also verifies that the two values are numbers
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function gte (val1, val2) {
|
||||
;[val1, val2] = getNumbers(val1, val2)
|
||||
assert.ok(val1 >= val2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the given value has the specified length
|
||||
* @param {string|object|array} the object to check
|
||||
* @param {number} the expected length
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function length (val, len) {
|
||||
if (typeof val === 'string' || Array.isArray(val)) {
|
||||
assert.equal(val.length, len)
|
||||
} else if (typeof val === 'object' && val !== null) {
|
||||
assert.equal(Object.keys(val).length, len)
|
||||
} else {
|
||||
assert.fail(`length: the given value is invalid: ${val}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a `do` action object and returns a structured object,
|
||||
* where the action is the key and the parameter is the value.
|
||||
* Eg:
|
||||
* {
|
||||
* 'indices.create': {
|
||||
* 'index': 'test'
|
||||
* },
|
||||
* 'warnings': [
|
||||
* '[index] is deprecated'
|
||||
* ]
|
||||
* }
|
||||
* becomes
|
||||
* {
|
||||
* method: 'indices.create',
|
||||
* params: {
|
||||
* index: 'test'
|
||||
* },
|
||||
* warnings: [
|
||||
* '[index] is deprecated'
|
||||
* ]
|
||||
* }
|
||||
* @param {object}
|
||||
* @returns {object}
|
||||
*/
|
||||
function parseDo (action) {
|
||||
return Object.keys(action).reduce((acc, val) => {
|
||||
switch (val) {
|
||||
case 'catch':
|
||||
acc.catch = action.catch
|
||||
break
|
||||
case 'warnings':
|
||||
acc.warnings = action.warnings
|
||||
break
|
||||
case 'node_selector':
|
||||
acc.node_selector = action.node_selector
|
||||
break
|
||||
default:
|
||||
// converts underscore to camelCase
|
||||
// eg: put_mapping => putMapping
|
||||
acc.method = val.replace(/_([a-z])/g, g => g[1].toUpperCase())
|
||||
acc.api = val
|
||||
acc.params = camelify(action[val])
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
|
||||
function camelify (obj) {
|
||||
const newObj = {}
|
||||
|
||||
// TODO: add camelCase support for this fields
|
||||
const doNotCamelify = ['copy_settings']
|
||||
|
||||
for (const key in obj) {
|
||||
const val = obj[key]
|
||||
let newKey = key
|
||||
if (!~doNotCamelify.indexOf(key)) {
|
||||
// if the key starts with `_` we should not camelify the first occurence
|
||||
// eg: _source_include => _sourceInclude
|
||||
newKey = key[0] === '_'
|
||||
? '_' + key.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
: key.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
}
|
||||
|
||||
if (
|
||||
val !== null &&
|
||||
typeof val === 'object' &&
|
||||
!Array.isArray(val) &&
|
||||
key !== 'body'
|
||||
) {
|
||||
newObj[newKey] = camelify(val)
|
||||
} else {
|
||||
newObj[newKey] = val
|
||||
}
|
||||
}
|
||||
|
||||
return newObj
|
||||
}
|
||||
}
|
||||
|
||||
function parseDoError (err, spec) {
|
||||
const httpErrors = {
|
||||
bad_request: 400,
|
||||
unauthorized: 401,
|
||||
forbidden: 403,
|
||||
missing: 404,
|
||||
request_timeout: 408,
|
||||
conflict: 409,
|
||||
unavailable: 503
|
||||
}
|
||||
|
||||
if (httpErrors[spec]) {
|
||||
return err.statusCode === httpErrors[spec]
|
||||
}
|
||||
|
||||
if (spec === 'request') {
|
||||
return err.statusCode >= 400 && err.statusCode < 600
|
||||
}
|
||||
|
||||
if (spec.startsWith('/') && spec.endsWith('/')) {
|
||||
return new RegExp(spec.slice(1, -1), 'g').test(JSON.stringify(err.body))
|
||||
}
|
||||
|
||||
if (spec === 'param') {
|
||||
return err instanceof ConfigurationError
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
function getSkip (arr) {
|
||||
if (!Array.isArray(arr)) return null
|
||||
for (let i = 0; i < arr.length; i++) {
|
||||
if (arr[i].skip) return arr[i].skip
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
// Gets two *maybe* numbers and returns two valida numbers
|
||||
// it throws if one or both are not a valid number
|
||||
// the returned value is an array with the new values
|
||||
function getNumbers (val1, val2) {
|
||||
const val1Numeric = Number(val1)
|
||||
if (isNaN(val1Numeric)) {
|
||||
throw new TypeError(`val1 is not a valid number: ${val1}`)
|
||||
}
|
||||
const val2Numeric = Number(val2)
|
||||
if (isNaN(val2Numeric)) {
|
||||
throw new TypeError(`val2 is not a valid number: ${val2}`)
|
||||
}
|
||||
return [val1Numeric, val2Numeric]
|
||||
}
|
||||
|
||||
function getRandomInt (min, max) {
|
||||
return Math.floor(Math.random() * (max - min)) + min
|
||||
}
|
||||
|
||||
/**
|
||||
* Logs a skip
|
||||
* @param {object} the actions
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function logSkip (action) {
|
||||
if (action.reason && action.version) {
|
||||
console.log(`Skip: ${action.reason} (${action.version})`)
|
||||
} else if (action.features) {
|
||||
console.log(`Skip: ${JSON.stringify(action.features)})`)
|
||||
} else {
|
||||
console.log('Skipped')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decides if a test should be skipped
|
||||
* @param {object} the actions
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function shouldSkip (esVersion, action) {
|
||||
let shouldSkip = false
|
||||
// skip based on the version
|
||||
if (action.version) {
|
||||
if (action.version.trim() === 'all') return true
|
||||
const versions = action.version.split(',').filter(Boolean)
|
||||
for (const version of versions) {
|
||||
const [min, max] = version.split('-').map(v => v.trim())
|
||||
// if both `min` and `max` are specified
|
||||
if (min && max) {
|
||||
shouldSkip = semver.satisfies(esVersion, action.version)
|
||||
// if only `min` is specified
|
||||
} else if (min) {
|
||||
shouldSkip = semver.gte(esVersion, min)
|
||||
// if only `max` is specified
|
||||
} else if (max) {
|
||||
shouldSkip = semver.lte(esVersion, max)
|
||||
// something went wrong!
|
||||
} else {
|
||||
throw new Error(`skip: Bad version range: ${action.version}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldSkip) return true
|
||||
|
||||
if (action.features) {
|
||||
if (!Array.isArray(action.features)) action.features = [action.features]
|
||||
// returns true if one of the features is not present in the supportedFeatures
|
||||
shouldSkip = !!action.features.filter(f => !~supportedFeatures.indexOf(f)).length
|
||||
}
|
||||
|
||||
if (shouldSkip) return true
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
function isNDJson (api) {
|
||||
const spec = require(join(locations.specFolder, `${api}.json`))
|
||||
const { content_type } = spec[Object.keys(spec)[0]].headers
|
||||
return Boolean(content_type && content_type.includes('application/x-ndjson'))
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the array syntax of keys and values
|
||||
* eg: 'hits.hits.1.stuff' to 'hits.hits[1].stuff'
|
||||
* @param {object} the action to update
|
||||
* @returns {obj} the updated action
|
||||
*/
|
||||
// function updateArraySyntax (obj) {
|
||||
// const newObj = {}
|
||||
|
||||
// for (const key in obj) {
|
||||
// const newKey = key.replace(/\.\d{1,}\./g, v => `[${v.slice(1, -1)}].`)
|
||||
// const val = obj[key]
|
||||
|
||||
// if (typeof val === 'string') {
|
||||
// newObj[newKey] = val.replace(/\.\d{1,}\./g, v => `[${v.slice(1, -1)}].`)
|
||||
// } else if (val !== null && typeof val === 'object') {
|
||||
// newObj[newKey] = updateArraySyntax(val)
|
||||
// } else {
|
||||
// newObj[newKey] = val
|
||||
// }
|
||||
// }
|
||||
|
||||
// return newObj
|
||||
// }
|
||||
|
||||
module.exports = build
|
||||
@ -27,7 +27,7 @@ const helper = require('./helper')
|
||||
const deepEqual = require('fast-deep-equal')
|
||||
const { join } = require('path')
|
||||
const { locations } = require('../../scripts/download-artifacts')
|
||||
const { ConfigurationError } = require('../../lib/errors')
|
||||
const packageJson = require('../../package.json')
|
||||
|
||||
const { delve, to, isXPackTemplate, sleep } = helper
|
||||
|
||||
@ -60,11 +60,11 @@ function build (opts = {}) {
|
||||
|
||||
if (isXPack) {
|
||||
// wipe rollup jobs
|
||||
const { body: jobsList } = await client.rollup.getJobs({ id: '_all' })
|
||||
const jobsList = await client.rollup.getJobs({ id: '_all' })
|
||||
const jobsIds = jobsList.jobs.map(j => j.config.id)
|
||||
await helper.runInParallel(
|
||||
client, 'rollup.stopJob',
|
||||
jobsIds.map(j => ({ id: j, waitForCompletion: true }))
|
||||
jobsIds.map(j => ({ id: j, wait_for_completion: true }))
|
||||
)
|
||||
await helper.runInParallel(
|
||||
client, 'rollup.deleteJob',
|
||||
@ -72,7 +72,7 @@ function build (opts = {}) {
|
||||
)
|
||||
|
||||
// delete slm policies
|
||||
const { body: policies } = await client.slm.getLifecycle()
|
||||
const policies = await client.slm.getLifecycle()
|
||||
await helper.runInParallel(
|
||||
client, 'slm.deleteLifecycle',
|
||||
Object.keys(policies).map(p => ({ policy_id: p }))
|
||||
@ -81,7 +81,7 @@ function build (opts = {}) {
|
||||
// remove 'x_pack_rest_user', used in some xpack test
|
||||
await client.security.deleteUser({ username: 'x_pack_rest_user' }, { ignore: [404] })
|
||||
|
||||
const { body: searchableSnapshotIndices } = await client.cluster.state({
|
||||
const searchableSnapshotIndices = await client.cluster.state({
|
||||
metric: 'metadata',
|
||||
filter_path: 'metadata.indices.*.settings.index.store.snapshot'
|
||||
})
|
||||
@ -95,7 +95,7 @@ function build (opts = {}) {
|
||||
}
|
||||
|
||||
// clean snapshots
|
||||
const { body: repositories } = await client.snapshot.getRepository()
|
||||
const repositories = await client.snapshot.getRepository()
|
||||
for (const repository of Object.keys(repositories)) {
|
||||
await client.snapshot.delete({ repository, snapshot: '*' }, { ignore: [404] })
|
||||
await client.snapshot.deleteRepository({ repository }, { ignore: [404] })
|
||||
@ -110,24 +110,24 @@ function build (opts = {}) {
|
||||
await client.indices.delete({ index: '*,-.ds-ilm-history-*', expand_wildcards: 'open,closed,hidden' }, { ignore: [404] })
|
||||
|
||||
// delete templates
|
||||
const { body: templates } = await client.cat.templates({ h: 'name' })
|
||||
const templates = await client.cat.templates({ h: 'name' })
|
||||
for (const template of templates.split('\n').filter(Boolean)) {
|
||||
if (isXPackTemplate(template)) continue
|
||||
const { body } = await client.indices.deleteTemplate({ name: template }, { ignore: [404] })
|
||||
const body = await client.indices.deleteTemplate({ name: template }, { ignore: [404] })
|
||||
if (JSON.stringify(body).includes(`index_template [${template}] missing`)) {
|
||||
await client.indices.deleteIndexTemplate({ name: template }, { ignore: [404] })
|
||||
}
|
||||
}
|
||||
|
||||
// delete component template
|
||||
const { body } = await client.cluster.getComponentTemplate()
|
||||
const body = await client.cluster.getComponentTemplate()
|
||||
const components = body.component_templates.filter(c => !isXPackTemplate(c.name)).map(c => c.name)
|
||||
if (components.length > 0) {
|
||||
await client.cluster.deleteComponentTemplate({ name: components.join(',') }, { ignore: [404] })
|
||||
}
|
||||
|
||||
// Remove any cluster setting
|
||||
const { body: settings } = await client.cluster.getSettings()
|
||||
const settings = await client.cluster.getSettings()
|
||||
const newSettings = {}
|
||||
for (const setting in settings) {
|
||||
if (Object.keys(settings[setting]).length === 0) continue
|
||||
@ -137,7 +137,7 @@ function build (opts = {}) {
|
||||
}
|
||||
}
|
||||
if (Object.keys(newSettings).length > 0) {
|
||||
await client.cluster.putSettings({ body: newSettings })
|
||||
await client.cluster.putSettings(newSettings)
|
||||
}
|
||||
|
||||
if (isXPack) {
|
||||
@ -147,20 +147,20 @@ function build (opts = {}) {
|
||||
'watch-history-ilm-policy', 'ml-size-based-ilm-policy',
|
||||
'logs', 'metrics'
|
||||
]
|
||||
const { body: policies } = await client.ilm.getLifecycle()
|
||||
const policies = await client.ilm.getLifecycle()
|
||||
for (const policy in policies) {
|
||||
if (preserveIlmPolicies.includes(policy)) continue
|
||||
await client.ilm.deleteLifecycle({ policy })
|
||||
}
|
||||
|
||||
// delete autofollow patterns
|
||||
const { body: patterns } = await client.ccr.getAutoFollowPattern()
|
||||
const patterns = await client.ccr.getAutoFollowPattern()
|
||||
for (const { name } of patterns.patterns) {
|
||||
await client.ccr.deleteAutoFollowPattern({ name })
|
||||
}
|
||||
|
||||
// delete all tasks
|
||||
const { body: nodesTask } = await client.tasks.list()
|
||||
const nodesTask = await client.tasks.list()
|
||||
const tasks = Object.keys(nodesTask.nodes)
|
||||
.reduce((acc, node) => {
|
||||
const { tasks } = nodesTask.nodes[node]
|
||||
@ -172,11 +172,11 @@ function build (opts = {}) {
|
||||
|
||||
await helper.runInParallel(
|
||||
client, 'tasks.cancel',
|
||||
tasks.map(id => ({ taskId: id }))
|
||||
tasks.map(id => ({ task_id: id }))
|
||||
)
|
||||
}
|
||||
|
||||
const { body: shutdownNodes } = await client.shutdown.getNode()
|
||||
const shutdownNodes = await client.shutdown.getNode()
|
||||
if (shutdownNodes._nodes == null && shutdownNodes.cluster_name == null) {
|
||||
for (const node of shutdownNodes.nodes) {
|
||||
await client.shutdown.deleteNode({ node_id: node.node_id })
|
||||
@ -186,7 +186,7 @@ function build (opts = {}) {
|
||||
// wait for pending task before resolving the promise
|
||||
await sleep(100)
|
||||
while (true) {
|
||||
const { body } = await client.cluster.pendingTasks()
|
||||
const body = await client.cluster.pendingTasks()
|
||||
if (body.tasks.length === 0) break
|
||||
await sleep(500)
|
||||
}
|
||||
@ -223,7 +223,8 @@ function build (opts = {}) {
|
||||
try {
|
||||
await client.security.putUser({
|
||||
username: 'x_pack_rest_user',
|
||||
body: { password: 'x-pack-test-password', roles: ['superuser'] }
|
||||
password: 'x-pack-test-password',
|
||||
roles: ['superuser']
|
||||
})
|
||||
} catch (err) {
|
||||
assert.ifError(err, 'should not error: security.putUser')
|
||||
@ -379,7 +380,22 @@ function build (opts = {}) {
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const options = { ignore: cmd.params.ignore, headers: action.headers }
|
||||
if (action.headers) {
|
||||
switch (action.headers['Content-Type'] || action.headers['content-type']) {
|
||||
case 'application/json':
|
||||
delete action.headers['Content-Type']
|
||||
delete action.headers['content-type']
|
||||
action.headers['Content-Type'] = `application/vnd.elasticsearch+json; compatible-with=${packageJson.version.split('.')[0]}`
|
||||
break
|
||||
case 'application/x-ndjson':
|
||||
delete action.headers['Content-Type']
|
||||
delete action.headers['content-type']
|
||||
action.headers['Content-Type'] = `application/vnd.elasticsearch+x-ndjson; compatible-with=${packageJson.version.split('.')[0]}`
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
const options = { ignore: cmd.params.ignore, headers: action.headers, meta: true }
|
||||
if (!Array.isArray(options.ignore)) options.ignore = [options.ignore]
|
||||
if (cmd.params.ignore) delete cmd.params.ignore
|
||||
|
||||
@ -388,6 +404,10 @@ function build (opts = {}) {
|
||||
cmd.params.body = [cmd.params.body]
|
||||
}
|
||||
|
||||
if (typeof cmd.params.body === 'string' && !isNDJson(cmd.api)) {
|
||||
cmd.params.body = JSON.parse(cmd.params.body)
|
||||
}
|
||||
|
||||
const [err, result] = await to(api(cmd.params, options))
|
||||
let warnings = result ? result.warnings : null
|
||||
const body = result ? result.body : null
|
||||
@ -707,6 +727,7 @@ function length (val, len) {
|
||||
* @returns {object}
|
||||
*/
|
||||
function parseDo (action) {
|
||||
action = JSON.parse(JSON.stringify(action))
|
||||
return Object.keys(action).reduce((acc, val) => {
|
||||
switch (val) {
|
||||
case 'catch':
|
||||
@ -723,42 +744,42 @@ function parseDo (action) {
|
||||
// eg: put_mapping => putMapping
|
||||
acc.method = val.replace(/_([a-z])/g, g => g[1].toUpperCase())
|
||||
acc.api = val
|
||||
acc.params = camelify(action[val])
|
||||
acc.params = action[val] // camelify(action[val])
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
|
||||
function camelify (obj) {
|
||||
const newObj = {}
|
||||
// function camelify (obj) {
|
||||
// const newObj = {}
|
||||
|
||||
// TODO: add camelCase support for this fields
|
||||
const doNotCamelify = ['copy_settings']
|
||||
// // TODO: add camelCase support for this fields
|
||||
// const doNotCamelify = ['copy_settings']
|
||||
|
||||
for (const key in obj) {
|
||||
const val = obj[key]
|
||||
let newKey = key
|
||||
if (!~doNotCamelify.indexOf(key)) {
|
||||
// if the key starts with `_` we should not camelify the first occurence
|
||||
// eg: _source_include => _sourceInclude
|
||||
newKey = key[0] === '_'
|
||||
? '_' + key.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
: key.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
}
|
||||
// for (const key in obj) {
|
||||
// const val = obj[key]
|
||||
// let newKey = key
|
||||
// if (!~doNotCamelify.indexOf(key)) {
|
||||
// // if the key starts with `_` we should not camelify the first occurence
|
||||
// // eg: _source_include => _sourceInclude
|
||||
// newKey = key[0] === '_'
|
||||
// ? '_' + key.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
// : key.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
// }
|
||||
|
||||
if (
|
||||
val !== null &&
|
||||
typeof val === 'object' &&
|
||||
!Array.isArray(val) &&
|
||||
key !== 'body'
|
||||
) {
|
||||
newObj[newKey] = camelify(val)
|
||||
} else {
|
||||
newObj[newKey] = val
|
||||
}
|
||||
}
|
||||
// if (
|
||||
// val !== null &&
|
||||
// typeof val === 'object' &&
|
||||
// !Array.isArray(val) &&
|
||||
// key !== 'body'
|
||||
// ) {
|
||||
// newObj[newKey] = camelify(val)
|
||||
// } else {
|
||||
// newObj[newKey] = val
|
||||
// }
|
||||
// }
|
||||
|
||||
return newObj
|
||||
}
|
||||
// return newObj
|
||||
// }
|
||||
}
|
||||
|
||||
function parseDoError (err, spec) {
|
||||
@ -785,7 +806,10 @@ function parseDoError (err, spec) {
|
||||
}
|
||||
|
||||
if (spec === 'param') {
|
||||
return err instanceof ConfigurationError
|
||||
// the new client do not perform runtime checks,
|
||||
// but it relies on typescript informing the user
|
||||
return true
|
||||
// return err instanceof ConfigurationError
|
||||
}
|
||||
|
||||
return false
|
||||
|
||||
Reference in New Issue
Block a user