Improve integration test execution time (#1005)
* Integration test: Add limit of 3 minutes per yaml file
* Monitor all test files that take more than 1m to execute
* Set the threshold to 30s
* Refactored integration test runner
* Better time reporting
* Updated test time limits
* Updated CI script
* Run oss only in oss build
* Run only oss test
* Revert "Run only oss test"
This reverts commit fd3a07d42d.
committed by delvedor · parent 0455b76fb8 · commit 0f60d78e5d
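
The time limits described in the commit message map to the MAX_API_TIME / MAX_FILE_TIME / MAX_TEST_TIME constants in the test/integration/index.js diff below. A minimal sketch of the reporting pattern — the hrtime-based now() helper and the constant value are copied from that diff, while timedFile is a hypothetical wrapper added here only for illustration:

const ms = require('ms')

const MAX_FILE_TIME = 1000 * 30 // the 30s per-file threshold from the diff below

// high-resolution wall clock in milliseconds
function now () {
  const ts = process.hrtime()
  return (ts[0] * 1e3) + (ts[1] / 1e6)
}

// hypothetical helper: time one yaml file and flag it when it crosses the threshold
async function timedFile (file, run) {
  const fileTime = now()
  await run(file) // run every test in the yaml file
  const totalFileTime = now() - fileTime
  if (totalFileTime > MAX_FILE_TIME) {
    console.log(`${file} took too long: ` + ms(totalFileTime))
  } else {
    console.log(`${file} took: ` + ms(totalFileTime))
  }
}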
.ci/run-repository.sh (new executable file, 61 lines)
@@ -0,0 +1,61 @@
#!/usr/bin/env bash

# parameters are available to this script

# ELASTICSEARCH_VERSION -- version, e.g. Major.Minor.Patch(-Prerelease)
# ELASTICSEARCH_CONTAINER -- The docker moniker as a reference to know which docker image distribution is used
# ELASTICSEARCH_URL -- The URL at which Elasticsearch is reachable
# NETWORK_NAME -- The docker network name
# NODE_NAME -- The docker container name, also used as the Elasticsearch node name
# NODE_JS_VERSION -- The Node.js version (defined in test-matrix.yml; a default is hardcoded here)

NODE_JS_VERSION=${NODE_JS_VERSION-12}
echo -e "\033[34;1mINFO:\033[0m URL ${ELASTICSEARCH_URL}\033[0m"
echo -e "\033[34;1mINFO:\033[0m VERSION ${ELASTICSEARCH_VERSION}\033[0m"
echo -e "\033[34;1mINFO:\033[0m CONTAINER ${ELASTICSEARCH_CONTAINER}\033[0m"
echo -e "\033[34;1mINFO:\033[0m TEST_SUITE ${TEST_SUITE}\033[0m"
echo -e "\033[34;1mINFO:\033[0m NODE_JS_VERSION ${NODE_JS_VERSION}\033[0m"

echo -e "\033[1m>>>>> Build docker container >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

set -eo pipefail

set +x
export VAULT_TOKEN=$(vault write -field=token auth/approle/login role_id="$VAULT_ROLE_ID" secret_id="$VAULT_SECRET_ID")
export CODECOV_TOKEN=$(vault read -field=token secret/clients-ci/elasticsearch-js/codecov)
unset VAULT_ROLE_ID VAULT_SECRET_ID VAULT_TOKEN
set -x

docker build \
  --file .ci/Dockerfile \
  --tag elastic/elasticsearch-js \
  --build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
  .

echo -e "\033[1m>>>>> NPM run ci >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

repo=$(realpath $(dirname $(realpath -s $0))/../)

if [[ $TEST_SUITE != "xpack" ]]; then
  docker run \
    --network=${NETWORK_NAME} \
    --env "TEST_ES_SERVER=${ELASTICSEARCH_URL}" \
    --env "CODECOV_TOKEN" \
    --volume $repo:/usr/src/app \
    --volume /usr/src/app/node_modules \
    --name elasticsearch-js \
    --rm \
    elastic/elasticsearch-js \
    npm run ci
else
  docker run \
    --network=${NETWORK_NAME} \
    --env "TEST_ES_SERVER=${ELASTICSEARCH_URL}" \
    --env "CODECOV_TOKEN" \
    --volume $repo:/usr/src/app \
    --volume /usr/src/app/node_modules \
    --name elasticsearch-js \
    --rm \
    elastic/elasticsearch-js \
    npm run test:integration
fi
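
The CI entry-point script below exports the variables documented at the top of this file before calling it; for a stand-alone run you would set them yourself. A hypothetical invocation, with every value illustrative (the values mirror what the script below passes):

export ELASTICSEARCH_VERSION=7.6.0
export ELASTICSEARCH_CONTAINER=elasticsearch-oss:7.6.0
export ELASTICSEARCH_URL=http://es1:9200
export NETWORK_NAME=elasticsearch
export NODE_NAME=es1
export NODE_JS_VERSION=12
export TEST_SUITE=oss
# VAULT_ROLE_ID and VAULT_SECRET_ID are also required for the CODECOV_TOKEN lookup
bash .ci/run-repository.sh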
@@ -1,59 +1,57 @@
#!/usr/bin/env bash

#
# Runs the client tests via Docker with the expectation that the required
# environment variables have already been exported before running this script.
#
# The required environment variables include:
#
#   - $ELASTICSEARCH_VERSION
#   - $NODE_JS_VERSION
#   - $TEST_SUITE
#
# Version 1.0
#   - Moved to .ci folder and separated out `run-repository.sh`

set -eo pipefail
if [[ -z $ELASTICSEARCH_VERSION ]]; then
  echo -e "\033[31;1mERROR:\033[0m Required environment variable [ELASTICSEARCH_VERSION] not set\033[0m"
  exit 1
fi
set -euxo pipefail

set +x
export VAULT_TOKEN=$(vault write -field=token auth/approle/login role_id="$VAULT_ROLE_ID" secret_id="$VAULT_SECRET_ID")
export CODECOV_TOKEN=$(vault read -field=token secret/clients-ci/elasticsearch-js/codecov)
unset VAULT_ROLE_ID VAULT_SECRET_ID VAULT_TOKEN
set -x

docker build \
  --file .ci/Dockerfile \
  --tag elastic/elasticsearch-js \
  --build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
  .
TEST_SUITE=${TEST_SUITE-oss}
NODE_NAME=instance

NODE_NAME="es1"
repo=$(pwd)
testnodecrt="/.ci/certs/testnode.crt"
testnodekey="/.ci/certs/testnode.key"
cacrt="/.ci/certs/ca.crt"

elasticsearch_image="elasticsearch"
elasticsearch_url="https://elastic:changeme@${NODE_NAME}:9200"
elasticsearch_image=elasticsearch
elasticsearch_url=https://elastic:changeme@${NODE_NAME}:9200
if [[ $TEST_SUITE != "xpack" ]]; then
  elasticsearch_image="elasticsearch-oss"
  elasticsearch_url="http://${NODE_NAME}:9200"
  elasticsearch_image=elasticsearch-${TEST_SUITE}
  elasticsearch_url=http://${NODE_NAME}:9200
fi

ELASTICSEARCH_VERSION="${elasticsearch_image}:${ELASTICSEARCH_VERSION}" \
NODE_NAME="${NODE_NAME}" \
NETWORK_NAME="esnet" \
function cleanup {
  status=$?
  set +x
  ELASTICSEARCH_VERSION=${elasticsearch_image}:${ELASTICSEARCH_VERSION} \
  NODE_NAME=${NODE_NAME} \
  NETWORK_NAME=elasticsearch \
  CLEANUP=true \
  bash ./.ci/run-elasticsearch.sh
  # Report status and exit
  if [[ "$status" == "0" ]]; then
    echo -e "\n\033[32;1mSUCCESS run-tests\033[0m"
    exit 0
  else
    echo -e "\n\033[31;1mFAILURE during run-tests\033[0m"
    exit ${status}
  fi
}
trap cleanup EXIT

echo -e "\033[1m>>>>> Start [$ELASTICSEARCH_VERSION container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

ELASTICSEARCH_VERSION=${elasticsearch_image}:${ELASTICSEARCH_VERSION} \
NODE_NAME=${NODE_NAME} \
NETWORK_NAME=elasticsearch \
DETACH=true \
SSL_CERT="${repo}${testnodecrt}" \
SSL_KEY="${repo}${testnodekey}" \
SSL_CA="${repo}${cacrt}" \
bash .ci/run-elasticsearch.sh

docker run \
  --network=esnet \
  --env "TEST_ES_SERVER=${elasticsearch_url}" \
  --env "CODECOV_TOKEN" \
  --volume $repo:/usr/src/app \
  --volume /usr/src/app/node_modules \
  --name elasticsearch-js \
  --rm \
  elastic/elasticsearch-js \
  npm run ci
echo -e "\033[1m>>>>> Repository specific tests >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

ELASTICSEARCH_CONTAINER=${elasticsearch_image}:${ELASTICSEARCH_VERSION} \
NETWORK_NAME=elasticsearch \
NODE_NAME=${NODE_NAME} \
ELASTICSEARCH_URL=${elasticsearch_url} \
bash .ci/run-repository.sh

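Net effect of the rewritten script above: ELASTICSEARCH_VERSION is mandatory, TEST_SUITE defaults to oss, and all client-side work is delegated to run-repository.sh. Assuming this file lives at .ci/run-tests (its path header is not part of this diff; the "Moved to .ci folder" note above suggests it), a local xpack run would look like this, with illustrative values:

ELASTICSEARCH_VERSION=7.6.0 TEST_SUITE=xpack NODE_JS_VERSION=12 bash .ci/run-tests

VAULT_ROLE_ID and VAULT_SECRET_ID must also be exported for the Vault lookup near the top of the script.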
@@ -19,7 +19,7 @@
    "test": "npm run lint && npm run test:unit && npm run test:behavior && npm run test:types",
    "test:unit": "tap test/unit/*.test.js -t 300 --no-coverage",
    "test:behavior": "tap test/behavior/*.test.js -t 300 --no-coverage",
    "test:integration": "tap test/integration/index.js -T --no-coverage",
    "test:integration": "node test/integration/index.js",
    "test:types": "tsc --project ./test/types/tsconfig.json",
    "test:coverage": "nyc tap test/unit/*.test.js test/behavior/*.test.js -t 300 && nyc report --reporter=text-lcov > coverage.lcov && codecov",
    "lint": "standard",
@@ -44,6 +44,7 @@
    "dedent": "^0.7.0",
    "deepmerge": "^4.0.0",
    "dezalgo": "^1.0.3",
    "fast-deep-equal": "^3.1.1",
    "js-yaml": "^3.13.1",
    "license-checker": "^25.0.1",
    "lolex": "^4.0.1",
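
The integration entry point is now a plain Node script rather than a tap-driven run (note the dropped -T flag: tap's process-level timeout no longer applies, which is what lets the runner do its own time accounting). Assuming the runner honors the TEST_ES_SERVER variable that the Docker invocations above pass in, a local run would be (address illustrative):

TEST_ES_SERVER=http://localhost:9200 npm run test:integration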
@@ -8,16 +8,20 @@ const { readFileSync, accessSync, mkdirSync, readdirSync, statSync } = require('
const { join, sep } = require('path')
const yaml = require('js-yaml')
const Git = require('simple-git')
const tap = require('tap')
const { Client } = require('../../index')
const TestRunner = require('./test-runner')
const build = require('./test-runner')
const { sleep } = require('./helper')
const ms = require('ms')

const esRepo = 'https://github.com/elastic/elasticsearch.git'
const esFolder = join(__dirname, '..', '..', 'elasticsearch')
const yamlFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'test')
const xPackYamlFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'test')

const MAX_API_TIME = 1000 * 90
const MAX_FILE_TIME = 1000 * 30
const MAX_TEST_TIME = 1000 * 2

const ossSkips = {
  'cat.indices/10_basic.yml': ['Test cat indices output for closed index (pre 7.2.0)'],
  'cluster.health/10_basic.yml': ['cluster health with closed index (pre 7.2.0)'],
@@ -68,235 +72,250 @@ const xPackBlackList = {
  'xpack/15_basic.yml': ['*']
}

class Runner {
  constructor (opts = {}) {
    const options = { node: opts.node }
    if (opts.isXPack) {
      options.ssl = {
        ca: readFileSync(join(__dirname, '..', '..', '.ci', 'certs', 'ca.crt'), 'utf8'),
        rejectUnauthorized: false
      }
    }
    this.client = new Client(options)
    console.log('Loading yaml suite')
  }

  async waitCluster (client, times = 0) {
    try {
      await client.cluster.health({ waitForStatus: 'green', timeout: '50s' })
    } catch (err) {
      if (++times < 10) {
        await sleep(5000)
        return this.waitCluster(client, times)
      }
      console.error(err)
      process.exit(1)
function runner (opts = {}) {
  const options = { node: opts.node }
  if (opts.isXPack) {
    options.ssl = {
      ca: readFileSync(join(__dirname, '..', '..', '.ci', 'certs', 'ca.crt'), 'utf8'),
      rejectUnauthorized: false
    }
  }
  const client = new Client(options)
  log('Loading yaml suite')
  start({ client, isXPack: opts.isXPack })
    .catch(console.log)
}

  async start ({ isXPack }) {
    const { client } = this
    const parse = this.parse.bind(this)
async function waitCluster (client, times = 0) {
  try {
    await client.cluster.health({ waitForStatus: 'green', timeout: '50s' })
  } catch (err) {
    if (++times < 10) {
      await sleep(5000)
      return waitCluster(client, times)
    }
    console.error(err)
    process.exit(1)
  }
}

    console.log('Waiting for Elasticsearch')
    await this.waitCluster(client)
async function start ({ client, isXPack }) {
  log('Waiting for Elasticsearch')
  await waitCluster(client)

    const { body } = await client.info()
    const { number: version, build_hash: sha } = body.version
  const { body } = await client.info()
  const { number: version, build_hash: sha } = body.version

    console.log(`Checking out sha ${sha}...`)
    await this.withSHA(sha)
  log(`Checking out sha ${sha}...`)
  await withSHA(sha)

    console.log(`Testing ${isXPack ? 'XPack' : 'oss'} api...`)
  log(`Testing ${isXPack ? 'XPack' : 'oss'} api...`)

    const folders = []
      .concat(getAllFiles(yamlFolder))
      .concat(isXPack ? getAllFiles(xPackYamlFolder) : [])
      .filter(t => !/(README|TODO)/g.test(t))
      // we cluster the array based on the folder names,
      // to provide a better test log output
      .reduce((arr, file) => {
        const path = file.slice(file.indexOf('/rest-api-spec/test'), file.lastIndexOf('/'))
        var inserted = false
        for (var i = 0; i < arr.length; i++) {
          if (arr[i][0].includes(path)) {
            inserted = true
            arr[i].push(file)
            break
  const folders = getAllFiles(isXPack ? xPackYamlFolder : yamlFolder)
    .filter(t => !/(README|TODO)/g.test(t))
    // we cluster the array based on the folder names,
    // to provide a better test log output
    .reduce((arr, file) => {
      const path = file.slice(file.indexOf('/rest-api-spec/test'), file.lastIndexOf('/'))
      var inserted = false
      for (var i = 0; i < arr.length; i++) {
        if (arr[i][0].includes(path)) {
          inserted = true
          arr[i].push(file)
          break
        }
      }
      if (!inserted) arr.push([file])
      return arr
    }, [])

  const totalTime = now()
  for (const folder of folders) {
    // pretty name
    const apiName = folder[0].slice(
      folder[0].indexOf(`${sep}rest-api-spec${sep}test`) + 19,
      folder[0].lastIndexOf(sep)
    )

    log('Testing ' + apiName.slice(1))
    const apiTime = now()

    for (const file of folder) {
      const testRunner = build({
        client,
        version,
        isXPack: file.includes('x-pack')
      })
      const fileTime = now()
      const data = readFileSync(file, 'utf8')
      // get the test yaml (as object); some files have multiple yaml documents inside,
      // every document is separated by '---', so we split on the separator
      // and then we remove the empty strings, finally we parse them
      const tests = data
        .split('\n---\n')
        .map(s => s.trim())
        .filter(Boolean)
        .map(parse)

      // get setup and teardown if present
      var setupTest = null
      var teardownTest = null
      for (const test of tests) {
        if (test.setup) setupTest = test.setup
        if (test.teardown) teardownTest = test.teardown
      }

      const cleanPath = file.slice(file.lastIndexOf(apiName))
      log(' ' + cleanPath)

      for (const test of tests) {
        const testTime = now()
        const name = Object.keys(test)[0]
        if (name === 'setup' || name === 'teardown') continue
        if (shouldSkip(isXPack, file, name)) continue
        log(' - ' + name)
        try {
          await testRunner.run(setupTest, test[name], teardownTest)
        } catch (err) {
          console.error(err)
          process.exit(1)
        }
        const totalTestTime = now() - testTime
        if (totalTestTime > MAX_TEST_TIME) {
          log(' took too long: ' + ms(totalTestTime))
        } else {
          log(' took: ' + ms(totalTestTime))
        }
      }
      const totalFileTime = now() - fileTime
      if (totalFileTime > MAX_FILE_TIME) {
        log(`  ${cleanPath} took too long: ` + ms(totalFileTime))
      } else {
        log(`  ${cleanPath} took: ` + ms(totalFileTime))
      }
    }
    const totalApiTime = now() - apiTime
    if (totalApiTime > MAX_API_TIME) {
      log(`${apiName} took too long: ` + ms(totalApiTime))
    } else {
      log(`${apiName} took: ` + ms(totalApiTime))
    }
  }
  log(`Total testing time: ${ms(now() - totalTime)}`)
}

function log (text) {
  process.stdout.write(text + '\n')
}

function now () {
  var ts = process.hrtime()
  return (ts[0] * 1e3) + (ts[1] / 1e6)
}

function parse (data) {
  try {
    var doc = yaml.safeLoad(data)
  } catch (err) {
    console.error(err)
    return
  }
  return doc
}

/**
 * Sets the elasticsearch repository to the given sha.
 * If the repository is not present in `esFolder` it will
 * clone the repository and then check out the sha.
 * If the repository is already present but it cannot check out
 * the given sha, it will perform a pull and then try again.
 * @param {string} sha
 * @param {function} callback
 */
function withSHA (sha) {
  return new Promise((resolve, reject) => {
    _withSHA(err => err ? reject(err) : resolve())
  })

  function _withSHA (callback) {
    var fresh = false
    var retry = 0

    if (!pathExist(esFolder)) {
      if (!createFolder(esFolder)) {
        return callback(new Error('Failed folder creation'))
      }
      fresh = true
    }

    const git = Git(esFolder)

    if (fresh) {
      clone(checkout)
    } else {
      checkout()
    }

    function checkout () {
      log(`Checking out sha '${sha}'`)
      git.checkout(sha, err => {
        if (err) {
          if (retry++ > 0) {
            return callback(err)
          }
          return pull(checkout)
        }
      if (!inserted) arr.push([file])
      return arr
    }, [])

for (const folder of folders) {
  // pretty name
  const apiName = folder[0].slice(
    folder[0].indexOf(`${sep}rest-api-spec${sep}test`) + 19,
    folder[0].lastIndexOf(sep)
  )

  tap.test(`Testing ${apiName}`, { bail: true, timeout: 0 }, t => {
    for (const file of folder) {
      const data = readFileSync(file, 'utf8')
      // get the test yaml (as object); some files have multiple yaml documents inside,
      // every document is separated by '---', so we split on the separator
      // and then we remove the empty strings, finally we parse them
      const tests = data
        .split('\n---\n')
        .map(s => s.trim())
        .filter(Boolean)
        .map(parse)

      t.test(
        file.slice(file.lastIndexOf(apiName)),
        testFile(file, tests)
      )
    }
    t.end()
        callback()
      })
    }

function testFile (file, tests) {
  return t => {
    // get setup and teardown if present
    var setupTest = null
    var teardownTest = null
    for (const test of tests) {
      if (test.setup) setupTest = test.setup
      if (test.teardown) teardownTest = test.teardown
    function pull (cb) {
      log('Pulling elasticsearch repository...')
      git.pull(err => {
        if (err) {
          return callback(err)
        }

    tests.forEach(test => {
      const name = Object.keys(test)[0]
      if (name === 'setup' || name === 'teardown') return
      if (shouldSkip(t, isXPack, file, name)) return

      // create a subtest for the specific folder + test file + test name
      t.test(name, async t => {
        const testRunner = new TestRunner({
          client,
          version,
          tap: t,
          isXPack: file.includes('x-pack')
        })
        await testRunner.run(setupTest, test[name], teardownTest)
      })
    })
    t.end()
  }
        cb()
      })
    }
}

  parse (data) {
    try {
      var doc = yaml.safeLoad(data)
    } catch (err) {
      console.error(err)
      return
    }
    return doc
  }

  getTest (folder) {
    const tests = readdirSync(folder)
    return tests.filter(t => !/(README|TODO)/g.test(t))
  }

  /**
   * Sets the elasticsearch repository to the given sha.
   * If the repository is not present in `esFolder` it will
   * clone the repository and then check out the sha.
   * If the repository is already present but it cannot check out
   * the given sha, it will perform a pull and then try again.
   * @param {string} sha
   * @param {function} callback
   */
  withSHA (sha) {
    return new Promise((resolve, reject) => {
      _withSHA.call(this, err => err ? reject(err) : resolve())
    })

    function _withSHA (callback) {
      var fresh = false
      var retry = 0

      if (!this.pathExist(esFolder)) {
        if (!this.createFolder(esFolder)) {
          return callback(new Error('Failed folder creation'))
    function clone (cb) {
      log('Cloning elasticsearch repository...')
      git.clone(esRepo, esFolder, err => {
        if (err) {
          return callback(err)
        }
        fresh = true
      }

      const git = Git(esFolder)

      if (fresh) {
        clone(checkout)
      } else {
        checkout()
      }

      function checkout () {
        console.log(`Checking out sha '${sha}'`)
        git.checkout(sha, err => {
          if (err) {
            if (retry++ > 0) {
              return callback(err)
            }
            return pull(checkout)
          }
          callback()
        })
      }

      function pull (cb) {
        console.log('Pulling elasticsearch repository...')
        git.pull(err => {
          if (err) {
            return callback(err)
          }
          cb()
        })
      }

      function clone (cb) {
        console.log('Cloning elasticsearch repository...')
        git.clone(esRepo, esFolder, err => {
          if (err) {
            return callback(err)
          }
          cb()
        })
      }
        cb()
      })
    }
  }
}

  /**
   * Checks if the given path exists
   * @param {string} path
   * @returns {boolean} true if exists, false if not
   */
  pathExist (path) {
    try {
      accessSync(path)
      return true
    } catch (err) {
      return false
    }
/**
 * Checks if the given path exists
 * @param {string} path
 * @returns {boolean} true if exists, false if not
 */
function pathExist (path) {
  try {
    accessSync(path)
    return true
  } catch (err) {
    return false
  }
}

  /**
   * Creates the given folder
   * @param {string} name
   * @returns {boolean} true on success, false on failure
   */
  createFolder (name) {
    try {
      mkdirSync(name)
      return true
    } catch (err) {
      return false
    }
/**
 * Creates the given folder
 * @param {string} name
 * @returns {boolean} true on success, false on failure
 */
function createFolder (name) {
  try {
    mkdirSync(name)
    return true
  } catch (err) {
    return false
  }
}

@@ -306,18 +325,17 @@ if (require.main === module) {
    node,
    isXPack: node.indexOf('@') > -1
  }
  const runner = new Runner(opts)
  runner.start(opts).catch(console.log)
  runner(opts)
}

const shouldSkip = (t, isXPack, file, name) => {
const shouldSkip = (isXPack, file, name) => {
  var list = Object.keys(ossSkips)
  for (var i = 0; i < list.length; i++) {
    const ossTest = ossSkips[list[i]]
    for (var j = 0; j < ossTest.length; j++) {
      if (file.endsWith(list[i]) && (name === ossTest[j] || ossTest[j] === '*')) {
        const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
        t.comment(`Skipping test ${testName} because it is blacklisted in the oss test`)
        log(`Skipping test ${testName} because it is blacklisted in the oss test`)
        return true
      }
    }
@@ -330,7 +348,7 @@ const shouldSkip = (t, isXPack, file, name) => {
    for (j = 0; j < platTest.length; j++) {
      if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) {
        const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
        t.comment(`Skipping test ${testName} because it is blacklisted in the XPack test`)
        log(`Skipping test ${testName} because it is blacklisted in the XPack test`)
        return true
      }
    }
@@ -347,4 +365,4 @@ const getAllFiles = dir =>
    return isDirectory ? [...files, ...getAllFiles(name)] : [...files, name]
  }, [])

module.exports = Runner
module.exports = runner
File diff suppressed because it is too large.