From 226d0e34982b89fbb01088f52ee2a6c9f108d869 Mon Sep 17 00:00:00 2001 From: Tomas Della Vedova Date: Wed, 31 Mar 2021 13:57:58 +0200 Subject: [PATCH] Move integration test to artifact API (#1436) --- .ci/Dockerfile | 4 + package.json | 4 +- scripts/download-artifacts.js | 159 ++++++++++++++++++++++++++++++++++ test/integration/index.js | 115 ++---------------------- 4 files changed, 175 insertions(+), 107 deletions(-) create mode 100644 scripts/download-artifacts.js diff --git a/.ci/Dockerfile b/.ci/Dockerfile index b25ea6f66..9e3716246 100644 --- a/.ci/Dockerfile +++ b/.ci/Dockerfile @@ -4,6 +4,10 @@ FROM node:${NODE_JS_VERSION} # Create app directory WORKDIR /usr/src/app +RUN apt-get clean -y +RUN apt-get update -y +RUN apt-get install -y zip + # Install app dependencies COPY package*.json ./ RUN npm install diff --git a/package.json b/package.json index 6dfdcc501..a4daa4c08 100644 --- a/package.json +++ b/package.json @@ -50,6 +50,7 @@ "@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1", "@types/node": "^14.14.28", "convert-hrtime": "^3.0.0", + "cross-zip": "^4.0.0", "dedent": "^0.7.0", "deepmerge": "^4.2.2", "dezalgo": "^1.0.3", @@ -58,6 +59,7 @@ "js-yaml": "^4.0.0", "license-checker": "^25.0.1", "minimist": "^1.2.5", + "node-fetch": "^2.6.1", "ora": "^5.3.0", "pretty-hrtime": "^1.0.3", "proxy": "^1.0.2", @@ -102,4 +104,4 @@ "coverage": false, "jobs-auto": true } -} \ No newline at end of file +} diff --git a/scripts/download-artifacts.js b/scripts/download-artifacts.js new file mode 100644 index 000000000..e1d4c5a5d --- /dev/null +++ b/scripts/download-artifacts.js @@ -0,0 +1,159 @@ +'use strict' + +const { join } = require('path') +const minimist = require('minimist') +const stream = require('stream') +const { promisify } = require('util') +const { createWriteStream, promises } = require('fs') +const rimraf = require('rimraf') +const fetch = require('node-fetch') +const crossZip = require('cross-zip') +const ora = require('ora') + +const { mkdir, writeFile } = promises +const pipeline = promisify(stream.pipeline) +const unzip = promisify(crossZip.unzip) +const rm = promisify(rimraf) + +const esFolder = join(__dirname, '..', 'elasticsearch') +const zipFolder = join(esFolder, 'artifacts.zip') +const specFolder = join(esFolder, 'rest-api-spec', 'api') +const freeTestFolder = join(esFolder, 'rest-api-spec', 'test', 'free') +const xPackTestFolder = join(esFolder, 'rest-api-spec', 'test', 'platinum') +const artifactInfo = join(esFolder, 'info.json') + +async function downloadArtifacts (opts) { + if (typeof opts.version !== 'string') { + throw new Error('Missing version') + } + + const log = ora('Checking out spec and test').start() + + log.text = 'Resolving versions' + let resolved + try { + resolved = await resolve(opts.version, opts.hash) + } catch (err) { + log.fail(err.message) + process.exit(1) + } + + opts.id = opts.id || resolved.id + opts.hash = opts.hash || resolved.hash + opts.version = resolved.version + + const info = loadInfo() + + if (info && info.version === opts.version) { + if (info.hash === opts.hash && info.id === opts.id) { + log.succeed('The artifact copy present locally is already up to date') + return + } + } + + log.text = 'Cleanup checkouts/elasticsearch' + await rm(esFolder) + await mkdir(esFolder, { recursive: true }) + + log.text = 'Downloading artifacts' + const response = await fetch(resolved.url) + if (!response.ok) { + log.fail(`unexpected response ${response.statusText}`) + process.exit(1) + } + await pipeline(response.body, 
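+  // stream.pipeline (promisified above) streams the response body straight
+  // into the zip file on disk, so the archive is never buffered entirely in
+  // memory and any download or write error rejects the promise.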
createWriteStream(zipFolder)) + + log.text = 'Unzipping' + await unzip(zipFolder, esFolder) + + log.text = 'Cleanup' + await rm(zipFolder) + + log.text = 'Update info' + await writeFile(artifactInfo, JSON.stringify(opts), 'utf8') + + log.succeed('Done') +} + +function loadInfo () { + try { + return require(artifactInfo) + } catch (err) { + return null + } +} + +async function resolve (version, hash) { + const response = await fetch(`https://artifacts-api.elastic.co/v1/versions/${version}`) + if (!response.ok) { + throw new Error(`unexpected response ${response.statusText}`) + } + + const data = await response.json() + const esBuilds = data.version.builds + .filter(build => build.projects.elasticsearch != null) + .map(build => { + return { + projects: build.projects.elasticsearch, + buildId: build.build_id, + date: build.start_time, + version: build.version + } + }) + .sort((a, b) => { + const dA = new Date(a.date) + const dB = new Date(b.date) + if (dA > dB) return -1 + if (dA < dB) return 1 + return 0 + }) + + if (hash != null) { + const build = esBuilds.find(build => build.projects.commit_hash === hash) + if (!build) { + throw new Error(`Can't find any build with hash '${hash}'`) + } + const zipKey = Object.keys(build.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip')) + return { + url: build.projects.packages[zipKey].url, + id: build.buildId, + hash: build.projects.commit_hash, + version: build.version + } + } + + const lastBuild = esBuilds[0] + const zipKey = Object.keys(lastBuild.projects.packages).find(key => key.startsWith('rest-resources-zip-') && key.endsWith('.zip')) + return { + url: lastBuild.projects.packages[zipKey].url, + id: lastBuild.buildId, + hash: lastBuild.projects.commit_hash, + version: lastBuild.version + } +} + +async function main (options) { + delete options._ + await downloadArtifacts(options) +} +if (require.main === module) { + process.on('unhandledRejection', function (err) { + console.error(err) + process.exit(1) + }) + + const options = minimist(process.argv.slice(2), { + string: ['id', 'version', 'hash'] + }) + main(options).catch(t => { + console.log(t) + process.exit(2) + }) +} + +module.exports = downloadArtifacts +module.exports.locations = { + specFolder, + freeTestFolder, + xPackTestFolder +} diff --git a/test/integration/index.js b/test/integration/index.js index 29d1a65f7..934552101 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -24,20 +24,18 @@ process.on('unhandledRejection', function (err) { process.exit(1) }) -const { writeFileSync, readFileSync, accessSync, mkdirSync, readdirSync, statSync } = require('fs') +const { writeFileSync, readFileSync, readdirSync, statSync } = require('fs') const { join, sep } = require('path') const yaml = require('js-yaml') -const Git = require('simple-git') const ms = require('ms') const { Client } = require('../../index') const build = require('./test-runner') const { sleep } = require('./helper') const createJunitReporter = require('./reporter') +const downloadArtifacts = require('../../scripts/download-artifacts') -const esRepo = 'https://github.com/elastic/elasticsearch.git' -const esFolder = join(__dirname, '..', '..', 'elasticsearch') -const yamlFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'test') -const xPackYamlFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'test') +const yamlFolder = downloadArtifacts.locations.freeTestFolder +const xPackYamlFolder = 
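+// The test folder locations come from the paths exported by the
+// download-artifacts script, so the runner no longer depends on a
+// local git checkout of the elasticsearch repository.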
downloadArtifacts.locations.xPackTestFolder const MAX_API_TIME = 1000 * 90 const MAX_FILE_TIME = 1000 * 30 @@ -84,6 +82,7 @@ const platinumBlackList = { ], // The cleanup fails with a index not found when retrieving the jobs 'ml/get_datafeed_stats.yml': ['Test get datafeed stats when total_search_time_ms mapping is missing'], + 'ml/preview_datafeed.yml': ['*'], // Investigate why is failing 'ml/inference_crud.yml': ['*'], // investigate why this is failing @@ -169,10 +168,10 @@ async function start ({ client, isXPack }) { await waitCluster(client) const { body } = await client.info() - const { number: version, build_hash: sha } = body.version + const { number: version, build_hash: hash } = body.version - log(`Checking out sha ${sha}...`) - await withSHA(sha) + log(`Downloading artifacts for hash ${hash}...`) + await downloadArtifacts({ hash, version }) log(`Testing ${isXPack ? 'Platinum' : 'Free'} api...`) const junit = createJunitReporter() @@ -217,7 +216,7 @@ async function start ({ client, isXPack }) { const testRunner = build({ client, version, - isXPack: file.includes('x-pack') + isXPack: file.includes('platinum') }) const fileTime = now() const data = readFileSync(file, 'utf8') @@ -325,102 +324,6 @@ function parse (data) { return doc } -/** - * Sets the elasticsearch repository to the given sha. - * If the repository is not present in `esFolder` it will - * clone the repository and the checkout the sha. - * If the repository is already present but it cannot checkout to - * the given sha, it will perform a pull and then try again. - * @param {string} sha - * @param {function} callback - */ -function withSHA (sha) { - return new Promise((resolve, reject) => { - _withSHA(err => err ? reject(err) : resolve()) - }) - - function _withSHA (callback) { - let fresh = false - let retry = 0 - - if (!pathExist(esFolder)) { - if (!createFolder(esFolder)) { - return callback(new Error('Failed folder creation')) - } - fresh = true - } - - const git = Git(esFolder) - - if (fresh) { - clone(checkout) - } else { - checkout() - } - - function checkout () { - log(`Checking out sha '${sha}'`) - git.checkout(sha, err => { - if (err) { - if (retry++ > 0) { - return callback(err) - } - return pull(checkout) - } - callback() - }) - } - - function pull (cb) { - log('Pulling elasticsearch repository...') - git.pull(err => { - if (err) { - return callback(err) - } - cb() - }) - } - - function clone (cb) { - log('Cloning elasticsearch repository...') - git.clone(esRepo, esFolder, err => { - if (err) { - return callback(err) - } - cb() - }) - } - } -} - -/** - * Checks if the given path exists - * @param {string} path - * @returns {boolean} true if exists, false if not - */ -function pathExist (path) { - try { - accessSync(path) - return true - } catch (err) { - return false - } -} - -/** - * Creates the given folder - * @param {string} name - * @returns {boolean} true on success, false on failure - */ -function createFolder (name) { - try { - mkdirSync(name) - return true - } catch (err) { - return false - } -} - function generateJunitXmlReport (junit, suite) { writeFileSync( join(__dirname, '..', '..', `${suite}-report-junit.xml`),
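+    // One report per suite is written to the repository root,
+    // named `${suite}-report-junit.xml`.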