remove packages to get npm audit to pass (#11)

* remove packages to get `npm audit` to pass

* fix grunt.config.init() call lost in split up

* remove integration tests, they don't work and nobody is running them

* fix upload_to_s3 task after refactor
This commit is contained in:
Spencer
2019-07-10 07:22:27 -07:00
committed by GitHub
parent 7c1573fb07
commit 58a29395e2
36 changed files with 151 additions and 2509 deletions

3
.gitignore vendored
View File

@ -3,15 +3,12 @@ dist
npm-debug.log
node_modules
scripts/scratch*
test/integration/yaml_suite/log
.aws-config.json
.idea
esvm
.es-snapshot-repos
## generated files
test/integration/yaml_suite/yaml_tests*.json
test/integration/yaml_suite/index*.js
src/_elasticsearch*
src/bower*/
junit-*.xml

View File

@ -3,14 +3,11 @@ bower_components
npm-debug.log
node_modules
scripts/scratch*
test/integration/yaml_suite/log
.aws-config.json
.idea
esvm
## generated files
test/integration/yaml_suite/yaml_tests*.json
test/integration/yaml_suite/index*.js
src/_elasticsearch*/
src/bower*/
junit-*.xml

View File

@ -12,45 +12,18 @@ Please write test cases to exercise your changes.
### 3. When you are ready, run the test suite
1. Install dev dependencies.
1. Install dependencies.
```sh
npm install
yarn
```
2. Install Grunt.
2. Run the unit tests.
```sh
npm install -g grunt-cli
yarn grunt test
```
3. Run the tests. Testing the code will lint the project, run the unit tests, install local versions of elasticsearch, and run the integration tests using those installations.
```sh
grunt test
```
Or to skip the integration tests:
```sh
grunt unit_test
```
4. Optionally, run the browser tests. While this step is automated and simple, it can take several minutes for the tests to complete. Unless you are making changes to browser specific portions of the code you can probably let Travis run the browser tests for you.
Quick Option: Run them locally in your browser
```sh
grunt browser_clients:build
node ./test/utils/server
# open http://127.0.0.1:8000/unit.html
# open http://127.0.0.1:8000/builds.html
```
Run them on Sauce Labs across several browsers, operating systems, and browser versions
```sh
grunt browser_clients:test
```
### 4. Submit a pull request
Push your local changes to a forked copy of the repository and submit a pull request. In the pull request, describe what your changes do and be sure to link to any conversations regarding this implementation, eg "Closes #123".

View File

@ -2,23 +2,35 @@ const { resolve } = require('path');
module.exports = function(grunt) {
// load plugins
require('load-grunt-config')(grunt, {
configPath: resolve(__dirname, 'grunt/config'),
init: true,
config: {
distDir: resolve(__dirname, 'dist'),
bowerSubmodule: resolve(__dirname, 'src/bower_es_js'),
meta: {
banner:
'/*! <%= package.name %> - v<%= package.version %> - ' +
'<%= grunt.template.today("yyyy-mm-dd") %>\n' +
'<%= package.homepage ? " * " + package.homepage + "\\n" : "" %>' +
' * Copyright (c) <%= grunt.template.today("yyyy") %> <%= package.author.company %>;' +
' Licensed <%= package.license %> */\n',
},
grunt.config.init({
distDir: resolve(__dirname, 'dist'),
bowerSubmodule: resolve(__dirname, 'src/bower_es_js'),
meta: {
banner:
'/*! <%= package.name %> - v<%= package.version %> - ' +
'<%= grunt.template.today("yyyy-mm-dd") %>\n' +
'<%= package.homepage ? " * " + package.homepage + "\\n" : "" %>' +
' * Copyright (c) <%= grunt.template.today("yyyy") %> <%= package.author.company %>;' +
' Licensed <%= package.license %> */\n',
},
clean: require('./grunt/config/clean'),
compress: require('./grunt/config/compress'),
concat: require('./grunt/config/concat'),
copy: require('./grunt/config/copy'),
run: require('./grunt/config/run'),
uglify: require('./grunt/config/uglify'),
webpack: require('./grunt/config/webpack'),
});
// load task definitions
grunt.loadTasks('grunt');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-compress');
grunt.loadNpmTasks('grunt-contrib-concat');
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-run');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-webpack');
require('./grunt/browser_clients')(grunt);
require('./grunt/tasks')(grunt);
};

View File

@ -1,14 +1,11 @@
const Fs = require('fs');
const { resolve } = require('path');
const readline = require('readline');
const chalk = require('chalk');
const AWS = require('aws-sdk');
module.exports = function(grunt) {
grunt.registerTask('browser_clients:dev', [
'run:browser_test_server:keepalive',
]);
grunt.registerTask('browser_clients:test', [
'browser_clients:build',
'run:browser_test_server',
'saucelabs-mocha:all',
]);
grunt.registerTask('browser_clients:build', function() {
// prevent this from running more than once accidentally
grunt.task.renameTask('browser_clients:build', 'browser_clients:rebuild');
@ -31,8 +28,7 @@ module.exports = function(grunt) {
]);
grunt.registerTask('browser_clients:release', [
'prompt:confirm_release',
'_check_for_confirmation',
'prompt_confirm_release',
'browser_clients:build',
'_upload_archive:release',
'run:clone_bower_repo',
@ -56,15 +52,68 @@ module.exports = function(grunt) {
'copy:dist_to_named_dir',
'compress:' + type + '_zip',
'compress:' + type + '_tarball',
'aws_s3:upload_archives',
'upload_to_s3',
]);
});
grunt.registerTask('_check_for_confirmation', function() {
if (grunt.config.get('confirm.release')) {
grunt.log.verbose.writeln('release confirmed');
} else {
throw new Error('Aborting release');
}
grunt.registerTask('prompt_confirm_release', function() {
const done = this.async();
const version = grunt.config.get('package.version');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
rl.question(
`Are you sure you want to ${chalk.bold(
'overwrite/release'
)} version ${chalk.bold(version)} of elasticsearch-js [Yn]: `,
resp => {
const answer = resp.trim().toLowerCase();
const confirm = answer === '' || answer === 'y' || answer === 'yes';
if (!confirm) {
grunt.fatal(new Error('Aborting release'));
}
rl.close();
done();
}
);
});
grunt.registerTask('upload_to_s3', function() {
const done = this.async();
Promise.resolve()
.then(async () => {
const s3 = new AWS.S3({
accessKeyId: process.env.AWS_KEY,
secretAccessKey: process.env.AWS_SECRET,
});
const archivesDir = resolve(grunt.config.get('distDir'), 'archives');
const bucket = 'download.elasticsearch.org';
for (const name of Fs.readdirSync(archivesDir)) {
grunt.log.writeln(`Uploading ${name} to ${bucket}`);
await s3
.putObject({
ACL: 'public-read',
Body: Fs.createReadStream(resolve(archivesDir, name)),
Bucket: bucket,
Key: `elasticsearch/elasticsearch-js/${name}`,
ContentDisposition: 'attachment',
})
.promise();
grunt.log.ok(`${name} complete`);
}
done();
})
.catch(error => {
grunt.fatal(error);
});
});
};

View File

@ -1,21 +0,0 @@
// grunt-aws-s3 target config: uploads every built archive in
// <distDir>/archives to the public elasticsearch download bucket.
// Credentials come from the AWS_KEY / AWS_SECRET environment variables.
module.exports = {
  upload_archives: {
    files: [
      {
        expand: true,
        cwd: '<%= distDir %>/archives',
        src: '*',
        dest: 'elasticsearch/elasticsearch-js/',
      },
    ],
    options: {
      accessKeyId: process.env.AWS_KEY,
      secretAccessKey: process.env.AWS_SECRET,
      bucket: 'download.elasticsearch.org',
      // uploaded objects are world-readable
      access: 'public-read',
      params: {
        // force browsers to download rather than render the archives
        ContentDisposition: 'attachment',
      },
    },
  },
};

View File

@ -1,113 +0,0 @@
// grunt-esvm config: describes how to download/boot local elasticsearch
// instances for integration testing. Options are declared per-version-range
// and merged by scripts/Version#mergeOpts for the branch being tested.
var _ = require('lodash');
var gruntUtils = require('../utils');
var fromRoot = require('path').join.bind(null, __dirname, '..', '..');
var release = process.env.ES_RELEASE;
var ref = process.env.ES_REF;
var port = parseFloat(_.get(process.env, 'ES_PORT', 9400));
var host = _.get(process.env, 'ES_HOST', 'localhost');
var Version = require('../../scripts/Version');

// each entry applies only when the target ES version satisfies `version`;
// later matching entries override earlier ones via mergeOpts
var versionedOpts = [
  {
    version: '*',
    directory: fromRoot('esvm'),
    nodes: 1,
    quiet: false,
    logLevel: 'ERROR',
    config: {
      'path.data': fromRoot('esvm/data_dir'),
      // the test runner checks this node name before wiping data,
      // to avoid clearing a non-test cluster
      'node.name': 'elasticsearch_js_test_runner',
      'cluster.name': 'elasticsearch_js_test_runners',
      'http.port': port,
      'network.host': host,
      'discovery.zen.minimum_master_nodes': 1,
    },
  },
  {
    version: '<3',
    config: {
      'discovery.zen.ping.multicast.enabled': false,
    },
  },
  {
    version: '<1.6',
    config: {
      'discovery.zen.ping_timeout': 1,
    },
  },
  {
    version: '^1.2 <1.6',
    config: {
      'node.bench': true,
      'script.disable_dynamic': false,
    },
  },
  {
    version: '>=1.6 <5.0',
    config: {
      'node.bench': true,
    },
  },
  {
    version: '>2.0 <5.0',
    config: {
      'node.testattr': 'test',
    },
  },
  {
    // setting was renamed to node.attr.* in ES 5.0
    version: '>=5.0',
    config: {
      'node.attr.testattr': 'test',
    },
  },
  {
    version: '>=1.6 <5.0',
    config: {
      'script.indexed': true,
    },
  },
  {
    version: '>=1.6',
    config: {
      'script.inline': true,
      'path.repo': process.env.ES_PATH_REPO || fromRoot('.es-snapshot-repos'),
      'repositories.url.allowed_urls': 'http://snapshot.*',
    },
  },
];

// targets for each branch
gruntUtils.branches.forEach(function(branch) {
  exports[branch] = {
    options: Version.fromBranch(branch).mergeOpts(versionedOpts, {
      branch: branch,
      fresh: true,
    }),
  };
});

// ci target, based on env variables
(function() {
  var v;
  var opts = {
    config: {
      'http.port': port,
    },
  };

  if (release) {
    // ES_RELEASE pins an exact published release (leading "v" optional)
    v = new Version(String(release).replace(/^v/, ''));
    opts.version = v.version;
  } else if (ref) {
    // ES_REF names a branch/tag; derive the major.minor branch from it
    v = Version.fromBranch(String(ref).replace(/v?(\d+\.\d+)\..+/, '$1'));
    opts.branch = ref;
    opts.fresh = true;
  } else {
    // neither env var set: no ci_env target is exported
    return;
  }

  exports.ci_env = {
    options: v.mergeOpts(versionedOpts, opts),
  };
})();

View File

@ -1,83 +0,0 @@
// grunt-mocha-cov targets: unit tests under several reporters, coverage
// builds/shipping, and integration suites whose `src` is filled in lazily
// by the mocha_integration tasks registered below.
var root = require('find-root')(__dirname);
var rel = require('path').resolve.bind(null, root);
var rootReq = function(p) {
  return require(rel(p));
};
var utils = rootReq('src/lib/utils');
var grunt = require('grunt');

var JENKINS_REPORTER = rel('test/utils/jenkins-reporter.js');

var config = {
  unit: {
    src: 'test/unit/index.js',
    options: {
      reporter: 'nyan',
    },
  },
  ci_unit: {
    src: 'test/unit/index.js',
    options: {
      reporter: 'spec',
    },
  },
  jenkins_unit: {
    src: 'test/unit/index.js',
    options: {
      reporter: JENKINS_REPORTER,
    },
  },
  // run the unit tests, and update coverage.html
  make_coverage_html: {
    src: 'test/unit/coverage.js',
    options: {
      reporter: 'html-cov',
      instrument: false,
    },
  },
  // for use by travis
  ship_coverage: {
    src: 'test/unit/coverage.js',
    options: {
      reporter: 'mocha-lcov-reporter',
      coveralls: true,
      instrument: false,
    },
  },
  // src is null until mocha_integration(branch) sets it at runtime
  integration: {
    src: null,
    options: {
      reporter: 'spec',
    },
  },
  jenkins_integration: {
    src: null,
    options: {
      reporter: JENKINS_REPORTER,
    },
  },
};

// point the integration target at the generated yaml-suite index for
// the requested branch, then run it
grunt.registerTask('mocha_integration', function(branch) {
  grunt.config.set(
    'mochacov.integration.src',
    'test/integration/yaml_suite/index_' + utils.snakeCase(branch) + '.js'
  );
  grunt.task.run('mochacov:integration');
});

grunt.registerTask('mocha_jenkins_integration', function(branch) {
  grunt.config.set(
    'mochacov.jenkins_integration.src',
    'test/integration/yaml_suite/index_' + utils.snakeCase(branch) + '.js'
  );
  grunt.task.run('mochacov:jenkins_integration');
});

module.exports = config;

View File

@ -1,6 +0,0 @@
// grunt-open target: opens the generated coverage report in Chrome
// (used by the `coverage` task after mochacov:make_coverage_html).
module.exports = {
  coverage: {
    path: 'coverage.html',
    app: 'Google Chrome',
  },
};

View File

@ -1,15 +0,0 @@
// grunt-prompt target: interactive yes/no gate before a release.
// The answer is stored in grunt config under `confirm.release`.
module.exports = {
  confirm_release: {
    options: {
      questions: [
        {
          config: 'confirm.release',
          type: 'confirm',
          message:
            'Are you sure you want to overwrite/release version <%= package.version %> of elasticsearch-js',
          // default to "no" so an accidental Enter aborts the release
          default: false,
        },
      ],
    },
  },
};

View File

@ -1,20 +1,12 @@
var gruntUtils = require('../utils');
const { REPO_ROOT, branches } = require('../utils');
var config = {
module.exports = {
generate: {
exec: 'node ./scripts/generate/index.js',
options: {
passArgs: ['verbose'],
},
},
browser_test_server: {
exec: 'node ./test/utils/server',
options: {
wait: false,
quiet: false,
ready: /listening/,
},
},
clone_bower_repo: {
exec: [
'test -d src/bower_es_js',
@ -24,6 +16,7 @@ var config = {
quiet: true,
},
},
checkout_bower_repo_master: {
exec: 'git checkout master',
options: {
@ -31,6 +24,7 @@ var config = {
quiet: true,
},
},
checkout_bower_repo_prerelease: {
exec: 'git checkout prerelease',
options: {
@ -38,6 +32,7 @@ var config = {
quiet: true,
},
},
push_prerelease_branch: {
exec:
'git add -A && git commit -m "prerelease build" && git push origin prerelease',
@ -46,15 +41,20 @@ var config = {
quite: true,
},
},
release_bower_tag: {
exec: 'node ./scripts/release/bower',
},
mocha: {
cmd: process.execPath,
args: ['scripts/mocha'],
cwd: REPO_ROOT,
},
};
gruntUtils.branches.forEach(function(branch) {
config['generate_' + branch] = {
branches.forEach(function(branch) {
module.exports['generate_' + branch] = {
exec: 'node ./scripts/generate/index.js --branch=' + branch,
};
});
module.exports = config;

View File

@ -1,36 +0,0 @@
var slk = require('../../test/utils/slk');
module.exports = {
all: {
options: {
urls: [
'http://127.0.0.1:8000/unit.html',
'http://127.0.0.1:8000/builds.html',
],
testname: 'es.js client tests',
build: process.env.TRAVIS_BUILD_ID,
concurrency: 10,
username: slk.user,
key: slk.key,
browsers: [
{
browserName: 'googlechrome',
platform: 'XP',
},
{
browserName: 'firefox',
platform: 'Linux',
},
{
browserName: 'internet explorer',
version: '11',
platform: 'Windows 8.1',
},
{
browserName: 'internet explorer',
version: '10',
platform: 'Windows 8',
},
],
},
},
};

View File

@ -1,14 +0,0 @@
// grunt-contrib-watch target: re-run the unit tests whenever source,
// grunt config, unit tests, or the Gruntfile change.
module.exports = {
  source: {
    files: [
      'src/**/*.js',
      'grunt/**/*.js',
      'test/unit/**/*.js',
      'Gruntfile.js',
    ],
    tasks: ['mochacov:unit'],
    options: {
      // abort an in-flight test run when new changes arrive
      interrupt: true,
    },
  },
};

View File

@ -4,35 +4,7 @@ module.exports = function(grunt) {
var readFile = Promise.promisify(require('fs').readFile);
var writeFile = Promise.promisify(require('fs').writeFile);
// Default task runs the build process.
grunt.registerTask('default', ['test']);
grunt.registerTask('test', function(branch) {
var tasks = [
branch ? 'run:generate_' + branch : 'run:generate',
'mochacov:unit',
];
var branches = branch ? [branch] : gruntUtils.branches;
process.env.ES_PORT = process.env.ES_PORT || 9400;
process.env.ES_HOST = process.env.ES_HOST || 'localhost';
branches.forEach(function(branch) {
tasks.push(
'esvm:' + branch,
'mocha_integration:' + branch,
'esvm_shutdown:' + branch
);
});
grunt.task.run(tasks);
});
grunt.registerTask('unit_test', 'mochacov:unit');
grunt.registerTask('coverage', [
'mochacov:make_coverage_html',
'open:coverage',
]);
grunt.registerTask('test', ['run:mocha']);
grunt.registerTask('version', function(nextVersion) {
var root = require('path').join.bind(null, __dirname, '..');

View File

@ -7,6 +7,8 @@ var unstable = pkg.config.unstable_es_branches;
var branches = [].concat(stable, unstable);
var gruntUtils = {
REPO_ROOT: root,
branchSuffix: function(branch) {
return branch === gruntUtils.branches._default
? ''

View File

@ -48,12 +48,14 @@
"@babel/preset-env": "^7.0.0-beta.41",
"aliasify": "^2.1.0",
"async": "~0.8.0",
"aws-sdk": "2.489.0",
"babel-eslint": "^8.2.2",
"babel-loader": "^8.0.0-beta",
"backport": "^3.0.2",
"blanket": "^1.2.3",
"bluebird": "^2.9.14",
"browserify": "^16.1.1",
"eslint": "6.0.1",
"eslint-config-prettier": "6.0.0",
"eslint-config-standard": "12.0.0",
"eslint-plugin-import": "2.18.0",
@ -61,38 +63,23 @@
"eslint-plugin-prettier": "3.1.0",
"eslint-plugin-promise": "4.2.1",
"eslint-plugin-standard": "4.0.0",
"eslint": "6.0.1",
"expect.js": "^0.3.1",
"express": "~3.4.7",
"fast-glob": "^3.0.4",
"find-root": "~0.1.1",
"glob": "~3.2.7",
"grunt": "^1.0.1",
"grunt-aws-s3": "^2.0.0",
"grunt-cli": "^1.2.0",
"grunt-contrib-clean": "^1.0.0",
"grunt-contrib-compress": "^1.2.0",
"grunt-contrib-concat": "^1.0.1",
"grunt-contrib-copy": "^1.0.0",
"grunt-contrib-uglify": "^1.0.1",
"grunt-contrib-watch": "^1.0.0",
"grunt-esvm": "^3.2.8",
"grunt-mocha-cov": "^0.4.0",
"grunt-open": "~0.2.2",
"grunt-prompt": "^1.3.3",
"grunt-run": "^0.6.0",
"grunt-saucelabs": "^8.6.2",
"grunt-run": "^0.8.1",
"grunt-webpack": "^3.1.1",
"jquery": "^3.3.1",
"js-yaml": "^3.6.0",
"load-grunt-config": "^0.19.2",
"load-grunt-tasks": "^3.5.0",
"mocha": "^2.2.5",
"mocha-lcov-reporter": "0.0.1",
"mocha-screencast-reporter": "~0.1.4",
"mocha": "^6.1.4",
"moment": "^2.13.0",
"nock": "^9.2.3",
"null-loader": "^0.1.1",
"open": "0.0.5",
"optimist": "~0.6.0",
"prettier": "1.18.2",
"semver": "^4.3.6",
@ -101,8 +88,7 @@
"through2": "~0.6.3",
"through2-map": "~1.4.0",
"webpack": "^3.10.0",
"webpack-dev-server": "^2.11.1",
"xmlbuilder": "~0.4.3"
"webpack-dev-server": "^2.11.1"
},
"license": "Apache-2.0",
"dependencies": {
@ -120,4 +106,4 @@
"engines": {
"node": ">=0.8"
}
}
}

View File

@ -1 +0,0 @@
require('backport');

View File

@ -1,352 +0,0 @@
/**
* Run the tests, and setup es if needed
*
* ENV VARS:
* RUN - a list of task names to run, specifying this turns off all other tasks
* ES_REF - the ES branch/tag we should use to generate the tests and download es
* ES_RELEASE - a specific ES release to download in use for testing
* ES_PORT - the port number we should run elasticsearch on
* ES_HOST - the hostname elasticsearch should bind to
* ES_V - a version identifier used by jenkins. don't use this
*
* Tasks:
* NODE_UNIT - run the unit tests in node (default: true)
* NODE_INTEGRATION - run the integration tests in node (default: true)
* SAUCE_LABS - run the browser tests (default: false)
* CHECK_COVERAGE - check for coverage and ship it to coveralls (default: false)
*
*******/
var Promise = require('bluebird');
var _ = require('lodash');
var through2 = require('through2');
var map = require('through2-map');
var split = require('split');
var join = require('path').join;
var cp = require('child_process');
var chalk = require('chalk');
var format = require('util').format;
var NL_RE = /(\r?\n)/g;
var ROOT = join(__dirname, '..');
var GRUNT = join(ROOT, 'node_modules', '.bin', 'grunt');
var ENV = _.clone(process.env);
var JENKINS = !!ENV.JENKINS_HOME;
var TASKS = [];
var output; // main output stream
var taskOut; // task output stream
// Unit tests in node; enabled by default. Jenkins gets the jenkins
// reporter target, everyone else the plain CI target.
task('NODE_UNIT', true, function() {
  if (!JENKINS) {
    return grunt('mochacov:ci_unit');
  }
  return grunt('mochacov:jenkins_unit');
});
// Integration tests in node; enabled by default. Generates the yaml suite
// for ES_REF, boots an ES instance via esvm, runs the suite, shuts down.
task('NODE_INTEGRATION', true, function() {
  var branch = ENV.ES_REF;
  return node('scripts/generate', '--no-api', '--branch', branch).then(
    function() {
      var target = (JENKINS ? 'jenkins_' : '') + 'integration:' + branch;
      return grunt('esvm:ci_env', 'mocha_' + target, 'esvm_shutdown:ci_env');
    }
  );
});
// Browser tests on Sauce Labs; off by default. Builds the browser clients,
// starts the local test server, and once the server logs that it is
// listening, kicks off the saucelabs run (with up to 3 retries when
// saucelabs itself flakes, i.e. non-zero exit with no failed tests).
task('SAUCE_LABS', false, function() {
  return new Promise(function(resolve, reject) {
    // build the clients and start the server, once the server is ready call trySaucelabs()
    var serverTasks = [
      'browser_clients:build',
      'run:browser_test_server:keepalive',
    ];

    spawn(GRUNT, serverTasks, function(proc) {
      var toLines = split();
      proc.stdout.pipe(toLines).pipe(
        through2(function(line, enc, cb) {
          cb();

          // wait for the server's ready message before starting the tests
          if (String(line).indexOf('listening on port 8000') === -1) return;

          trySaucelabs()
            .finally(function() {
              // kill the keepalive server once the tests settle
              if (proc) proc.kill();
            })
            .then(resolve, reject);

          proc.on('exit', function() {
            proc = null;
          });

          // ready-line seen: stop scanning server output
          proc.stdout.unpipe(toLines);
          toLines.end();
        })
      );
    })
      // ignore server errors
      .catch(_.noop);

    // attempt to run tests on saucelabs and retry if it fails
    var saucelabsAttempts = 0;
    function trySaucelabs() {
      saucelabsAttempts++;
      return new Promise(function(resolve, reject) {
        log(chalk.green('saucelabs attempt #', saucelabsAttempts));

        spawn(GRUNT, ['saucelabs-mocha'], function(cp) {
          // count "Passed: false" lines to distinguish real test failures
          // from saucelabs infrastructure flakiness
          var failedTests = 0;
          cp.stdout.pipe(split()).pipe(
            map(function(line) {
              failedTests += String(line).trim() === 'Passed: false' ? 1 : 0;
            })
          );

          cp.on('error', reject);

          cp.on('exit', function(code) {
            if (code > 0) {
              if (failedTests > 0) {
                return reject(new Error('Browser tests failed'));
              }

              if (saucelabsAttempts >= 3) {
                return reject(
                  new Error(
                    'Saucelabs is like really really down. Tried 3 times'
                  )
                );
              }

              log(chalk.blue('trying saucelabs again...'));
              return trySaucelabs().then(resolve, reject);
            }

            return resolve();
          });
        })
          // swallow spawn() errors, custom error handler in place
          .catch(_.noop);
      });
    }
  });
});
// Ship coverage to coveralls; off by default and best-effort only —
// a failure is logged but never fails the build.
task('CHECK_COVERAGE', false, function() {
  return grunt('mochacov:ship_coverage').catch(function() {
    log("FAILED TO SHIP COVERAGE! but that's okay");
  });
});
// Main entry point: parse the ES version env vars, normalize ES_PORT /
// ES_HOST / ES_REF / ES_RELEASE, pick the task queue (RUN overrides the
// per-task defaults), then run the queue sequentially. A failure anywhere
// logs the stack and forces a non-zero exit code.
execTask('SETUP', function() {
  return Promise.try(function readVersion() {
    // returns [ref, release]: a git ref plus, when known, a release number
    if (!ENV.ES_V) {
      if (ENV.ES_RELEASE) {
        return ['v' + ENV.ES_RELEASE, ENV.ES_RELEASE];
      }
      if (ENV.ES_REF) {
        return [ENV.ES_REF, null];
      }
    }

    // ES_V is jenkins-style, e.g. "1.3_nightly" or "1.4.2"
    var match = ENV.ES_V.match(/^(.*)_nightly$/);
    if (match) {
      return [match[1], null];
    }

    if (/^(?:1\.\d+|0\.90)\..*$/.test(ENV.ES_V)) {
      return ['v' + ENV.ES_V, ENV.ES_V];
    }

    throw new Error('unable to parse ES_V ' + ENV.ES_V);
  })
    .then(function readOtherConf(ver) {
      if (!ver) {
        throw new Error(
          'Unable to run the ci script without at least an ES_REF or ES_RELEASE environment var.'
        );
      }

      // normalize and echo the effective environment
      log('ES_PORT:', (ENV.ES_PORT = parseInt(ENV.ES_PORT || 9400, 10)));
      log('ES_HOST:', (ENV.ES_HOST = ENV.ES_HOST || 'localhost'));
      if (ver[0]) log('ES_REF:', (ENV.ES_REF = ver[0]));
      else delete ENV.ES_REF;
      if (ver[1]) log('ES_RELEASE:', (ENV.ES_RELEASE = ver[1]));
      else delete ENV.ES_RELEASE;
    })
    .then(function readTasks() {
      // no RUN: every task registered with default=true
      if (!ENV.RUN) {
        return _.filter(TASKS, { default: true });
      }

      // RUN is a comma-separated whitelist of task names
      return ENV.RUN.split(',')
        .map(function(name) {
          return _.find(TASKS, { name: name.trim() });
        })
        .filter(Boolean);
    });
})
  .then(function(queue) {
    if (!queue.length) {
      throw new Error('no tasks to run');
    }

    // Recursively do tasks until the queue is empty
    return (function next() {
      if (!queue.length) return;
      return execTask(queue.shift()).then(next);
    })();
  })
  .then(function() {
    logImportant(chalk.bold.green('✔︎ SUCCESS'));
  })
  .catch(function(e) {
    logImportant(chalk.bold.red('✗ FAILURE\n\n' + e.stack));

    // override process exit code once it is ready to close
    process.once('exit', function() {
      process.exit(1);
    });
  });
/** ****
* utils
******/
// printf-style logger: formats the arguments and writes a line to the
// innermost active stream (task output, then main output, then stdout).
function log() {
  var chunk = format.apply(null, arguments);
  (taskOut || output || process.stdout).write(chunk + '\n');
}
// Log `text` framed by dashed separator lines, preceded by a blank line.
function logImportant(text) {
  log('\n------------\n' + text + '\n------------');
}
// Wrap a mapper `m` as a through2-style transform callback: the wrapped
// function pushes m's result onto the stream and then signals completion
// via the trailing callback argument.
function push(m) {
  return function() {
    var params = _.toArray(arguments);
    var done = params.pop();
    this.push(m.apply(this, params));
    done();
  };
}
// Create a transform stream that indents piped output: a space is pushed
// up front and re-inserted after every newline, with a trailing newline
// on flush.
function indent() {
  var str = through2(
    push(function(chunk) {
      return String(chunk).replace(NL_RE, '$1 ');
    }),
    push(function() {
      return '\n';
    })
  );
  str.push(' ');
  return str;
}
// Register a named CI task. `def` marks whether it runs by default when
// RUN is unset; it may be omitted (fn passed second), defaulting to true.
function task(name, def, fn) {
  if (_.isFunction(def)) {
    fn = def;
    def = true;
  }

  TASKS.push({
    name: name,
    default: def,
    fn: fn,
  });
}
// Run a single task with indented, labeled output. Accepts either
// (name, fn) or a task object from TASKS. Sets up the module-level
// `output`/`taskOut` streams for the duration of the task and flushes
// them afterwards, resolving with the task's result.
function execTask(name, task) {
  if (_.isObject(name)) {
    task = name.fn;
    name = name.name;
  }

  output = through2();
  output.pipe(
    process.stdout,
    { end: false }
  );
  log(chalk.white.underline(name));

  // task output is piped through the indenter into the main output
  taskOut = through2();
  taskOut.pipe(indent()).pipe(output);

  function flushTaskOut() {
    return new Promise(function(resolve) {
      // wait for the taskOut to finish writing before continuing
      output.once('finish', function() {
        log('');
        resolve();
      });

      taskOut.end(); // will end output as well
      taskOut = output = null;
    });
  }

  return Promise.try(task).finally(flushTaskOut);
}
// Spawn a child process in the repo root with the shared ENV, mirroring
// its stdout/stderr into the current task output. Resolves with the
// accumulated stdout on exit code 0, rejects otherwise. `block`, when
// given, receives the ChildProcess for custom wiring.
function spawn(file, args, block) {
  return new Promise(function(resolve, reject) {
    var proc = cp.spawn(file, args, {
      cwd: ROOT,
      env: ENV,
      stdio: [0, 'pipe', 'pipe'],
    });

    proc.stdout.pipe(
      taskOut,
      { end: false }
    );

    proc.stderr.pipe(
      taskOut,
      { end: false }
    );

    // collect stdout separately so it can be the resolution value
    var stdout = '';
    proc.stdout.pipe(
      through2(function(chunk, enc, cb) {
        stdout += chunk;
        cb();
      })
    );

    if (block) block(proc);

    proc.on('exit', function(code) {
      if (code > 0) {
        reject(new Error('non-zero exit code: ' + code));
      } else {
        resolve(stdout);
      }
    });

    proc.on('error', function(origErr) {
      reject(
        new Error(
          'Unable to execute "' +
            file +
            ' ' +
            args.join(' ') +
            '": ' +
            origErr.message
        )
      );
    });
  });
}
// Convenience wrappers around spawn() for the node binary and the
// locally-installed grunt CLI.
function node(/* args... */) {
  return spawn(process.execPath, _.toArray(arguments));
}

function grunt(/* args... */) {
  return spawn(GRUNT, _.toArray(arguments));
}

View File

@ -12,10 +12,6 @@ var argv = require('optimist').options({
default: true,
boolean: true,
},
tests: {
default: true,
boolean: true,
},
update: {
default: true,
boolean: true,
@ -262,10 +258,7 @@ function createArchive(branch) {
function generateStep(branch) {
return function(done) {
async.parallel(
[
argv.api && async.apply(require('./js_api'), branch),
argv.tests && async.apply(require('./yaml_tests'), branch),
].filter(Boolean),
[argv.api && async.apply(require('./js_api'), branch)].filter(Boolean),
done
);
};

View File

@ -1,60 +0,0 @@
module.exports = function(branch, done) {
  /**
   * Creates a JSON version of the YAML test suite that can be simply bundled for use in the browser.
   *
   * Reads every .yaml file under the checked-out ES rest-api-spec tests for
   * `branch`, writes them as one JSON file plus a small index module into
   * test/integration/yaml_suite/, then calls `done(err)`.
   */
  var jsYaml = require('js-yaml');
  var fs = require('fs');
  var async = require('async');
  var chalk = require('chalk');
  var path = require('path');
  var fromRoot = path.join.bind(path, require('find-root')(__dirname));
  var utils = require(fromRoot('src/lib/utils'));

  var tests = {}; // populated in readYamlTests
  var esDir = fromRoot('src/_elasticsearch_' + utils.snakeCase(branch));

  // generate the yaml tests
  async.series([readYamlTests, writeYamlTests, writeTestIndex], done);

  // walk the rest-api-spec test dir, parsing each .yaml file into
  // tests[relativePath] = [docs...] (multi-document YAML supported)
  function readYamlTests(done) {
    var testDir = path.join(esDir, 'rest-api-spec/test/');

    function readDirectories(dir) {
      fs.readdirSync(dir).forEach(function(filename) {
        var filePath = path.join(dir, filename);
        var stat = fs.statSync(filePath);

        if (stat.isDirectory()) {
          readDirectories(filePath);
        } else if (filename.match(/\.yaml$/)) {
          var file = (tests[path.relative(testDir, filePath)] = []);
          jsYaml.loadAll(fs.readFileSync(filePath, 'utf8'), function(doc) {
            file.push(doc);
          });
        }
      });
    }

    readDirectories(testDir);
    done();
  }

  // serialize the collected tests to yaml_tests_<branch>.json
  function writeYamlTests(done) {
    var testFile = fromRoot(
      'test/integration/yaml_suite/yaml_tests_' +
        utils.snakeCase(branch) +
        '.json'
    );
    fs.writeFileSync(testFile, JSON.stringify(tests, null, ' '), 'utf8');
    console.log(chalk.white.bold('wrote') + ' YAML tests as JSON to', testFile);
    done();
  }

  // write index_<branch>.js, the entry point that runs this suite
  function writeTestIndex(done) {
    var file = fromRoot(
      'test/integration/yaml_suite/index_' + utils.snakeCase(branch) + '.js'
    );
    fs.writeFileSync(file, "require('./run')('" + branch + "');\n", 'utf8');
    console.log(chalk.white.bold('wrote') + ' YAML index to', file);
    done();
  }
};

View File

@ -1,61 +0,0 @@
#!/bin/bash
# Jenkins CI bootstrap: under a machine-wide lock, installs the pinned
# node version via nvm, installs npm deps, then runs the node CI script
# with executor-specific port/repo settings.

export JAVA_HOME="/usr/lib/jvm/jdk8"

lockpath="/var/lock/setup_nodejs"

# pass a file name to acquire a lock
function get_lock {
  echo "attempting to get a lock on $lockpath"
  # retry every 5s, up to 120 times (10 minutes)
  lockfile -5 -r 120 "$lockpath"
  if [[ $? -gt 0 ]]; then
    echo "failed to get lock file $lockpath within 10 minutes"
    exit 1;
  else
    echo "acquired lock file $lockpath"
  fi
}

# clear all acquired locks
function release_lock {
  rm -f "$lockpath"
  echo "cleared lock file $lockpath"
}

# execute a command, and exit if it fails
function crit {
  $*
  CODE=$?
  if [[ $CODE -gt 0 ]]; then
    echo "last command was critical, but it reported non-zero exit code $CODE";
    release_lock # clear any locks
    exit $CODE;
  fi
}

# install a specific version of Node and the latest version of NPM within that install
function install_node {
  local version=$1

  # load nvm on first use
  if [[ $(type -t nvm) != "function" ]]; then
    source /mnt/jenkins/nvm/nvm.sh
    ulimit -c unlimited
  fi

  echo "installing node $version";
  crit nvm install "$version"

  # occasionally nvm installs node without npm; wipe and retry
  if [[ "$(which npm)" == "" ]]; then
    echo "npm is missing, reinstalling node version $version";
    crit nvm deactivate;
    crit nvm uninstall "$version";
    install_node "$version";
    return
  fi
}

get_lock
install_node "$(cat ./.node-version)"
npm install
release_lock

# offset the ES port and snapshot repo per Jenkins executor so parallel
# builds on one machine do not collide
ES_PATH_REPO="./.es-snapshot-repos/$EXECUTOR_NUMBER/" ES_PORT=$((9400 + EXECUTOR_NUMBER)) RUN=NODE_UNIT,NODE_INTEGRATION VERBOSE=true node ./scripts/ci.js

7
scripts/mocha.js Normal file
View File

@ -0,0 +1,7 @@
// Launch mocha against the unit-test entry point: splice the test file
// and reporter flags into argv (after the node binary and script path)
// as if they had been passed on the command line, then defer to the
// stock mocha CLI.
var injectedArgs = [
  require.resolve('../test/unit/index.js'),
  '--reporter',
  'nyan',
];
process.argv.splice.apply(process.argv, [2, 0].concat(injectedArgs));
require('mocha/bin/mocha');

View File

@ -1,4 +0,0 @@
---
extends: ../.eslintrc
env:
mocha: true

6
test/.eslintrc.js Normal file
View File

@ -0,0 +1,6 @@
// ESLint config for the test tree: inherit the project-wide rules and
// enable the mocha environment globals (describe, it, before, ...).
module.exports = {
  extends: '../.eslintrc.js',
  env: {
    mocha: true,
  },
};

View File

@ -1,36 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Mocha Tests</title>
<link rel="stylesheet" href="mocha.css" />
<link rel="stylesheet" href="screencast-reporter.css" />
</head>
<body>
<div id="mocha"></div>
<script src="expect.js"></script>
<script src="mocha.js"></script>
<script src="screencast-reporter.js"></script>
<script>
mocha.setup('bdd');
mocha.reporter(ScreencastReporter);
</script>
<!-- libs -->
<script src="jquery.js"></script>
<script src="angular.js"></script>
<script src="angular-mocks.js"></script>
<!-- builds -->
<script src="elasticsearch.js"></script>
<script src="elasticsearch.angular.js"></script>
<script src="elasticsearch.jquery.js"></script>
<!-- tests -->
<script src="build_tests.js"></script>
<script>
mochaRunner = mocha.run().on('end', function(){
window.mochaResults = this.stats;
});
</script>
</body>
</html>

View File

@ -1,25 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Mocha Tests</title>
<link rel="stylesheet" href="mocha.css" />
<link rel="stylesheet" href="screencast-reporter.css" />
</head>
<body>
<div id="mocha"></div>
<script src="expect.js"></script>
<script src="mocha.js"></script>
<script src="screencast-reporter.js"></script>
<script>
mocha.setup('bdd');
mocha.reporter(ScreencastReporter);
</script>
<script src="unit_tests.js"></script>
<script>
mochaRunner = mocha.run().on('end', function(){
window.mochaResults = this.stats;
});
</script>
</body>
</html>

View File

@ -4,12 +4,21 @@
// which prevent sinon from being able to ensure
// timeouts aren't being left behind
var express = require('express');
var app = express().post('/_search', function(req, res) {
res.json(200, { hits: { hits: [] } });
var http = require('http');
var server = http.createServer(function(req, res) {
if (req.url === '/_search' && req.method === 'POST') {
const body = JSON.stringify({ hits: { hits: [] } });
res.setHeader('Content-Type', 'application/json');
res.setHeader('Content-Length', body.length);
res.end(body);
} else {
res.statusCode = 404;
res.setHeader('Content-Type', 'text/plain');
res.end('Not Found');
}
});
var server = require('http').createServer(app);
server.listen(function() {
var port = server.address().port;
if (process.connected) {

View File

@ -1,165 +0,0 @@
var BROWSER = process.env.browser;
var VERBOSE = process.env.VERBOSE;
var JENKINS = !!process.env.JENKINS_HOME;
var es;
if (BROWSER) {
es = window.elasticsearch;
} else {
es = require('../../../src/elasticsearch');
}
var _ = require('lodash');
var path = require('path');
var fs = require('fs');
var fromRoot = _.bindKey(path, 'join', require('find-root')(__dirname));
var Bluebird = require('bluebird');
// current client
var client = null;
module.exports = {
create: function create(apiVersion, port, host, cb) {
// create a client and ping the server for up to 15 seconds
doCreateClient(
{
logConfig: null,
},
function() {
var attemptsRemaining = 60;
var timeout = 500;
(function ping() {
client.info(
{
maxRetries: 0,
requestTimeout: 100,
},
function(err, resp) {
if (err && --attemptsRemaining) {
setTimeout(ping, timeout);
} else if (err) {
cb(
new Error(
'unable to establish contact with ES at ' +
JSON.stringify({
host: host,
port: port,
err: err,
})
)
);
} else if (resp.name !== 'elasticsearch_js_test_runner') {
console.log(resp);
cb(
new Error(
'Almosted wiped out another es node. Shut-down all instances of ES and try again.'
)
);
} else {
// create a new client
doCreateClient(function() {
cb(void 0, client);
});
}
}
);
})();
}
);
function doCreateClient(options, cb) {
if (typeof options === 'function') {
cb = options;
options = {};
}
var logConfig = {};
if (_.has(options, 'logConfig')) {
logConfig = options.logConfig;
} else {
if (BROWSER) {
logConfig.type = 'console';
} else if (JENKINS || !VERBOSE) {
logConfig.type = 'stdio';
} else {
logConfig.type = 'tracer';
}
logConfig.level = JENKINS || VERBOSE ? 'trace' : 'error';
}
if (logConfig && logConfig.type === 'tracer') {
try {
fs.unlinkSync(fromRoot('elasticsearch-tracer.log'));
} catch (e) {}
}
// close existing client
if (client) {
client.close();
}
client = new es.Client({
apiVersion: apiVersion,
hosts: [
{
host: host,
port: port,
},
],
log: logConfig,
defer: function() {
return Bluebird.defer();
},
});
client.clearEs = function() {
return Bluebird.all([
client.indices.delete({ index: '*', ignore: 404 }),
client.indices.deleteTemplate({ name: '*', ignore: 404 }),
client.snapshot
.getRepository()
.then(_.keys)
.map(
function(repo) {
return client.snapshot
.get({
repository: repo,
snapshot: '_all',
})
.then(
function(resp) {
return _.map(resp.snapshots, 'snapshot');
},
function() {
return [];
}
)
.map(
function(snapshot) {
return client.snapshot.delete({
repository: repo,
snapshot: snapshot,
});
},
{ concurrency: 1 }
)
.then(function() {
return client.snapshot.deleteRepository({
repository: repo,
});
});
},
{ concurrency: 1 }
),
]);
};
process.nextTick(cb);
}
},
get: function() {
return client;
},
};

View File

@ -1,42 +0,0 @@
module.exports = function(branch) {
var path = require('path');
var YamlFile = require('./yaml_file');
var root = require('find-root')(__dirname);
var rootReq = function(loc) {
return require(path.join(root, loc));
};
var _ = require('lodash');
var utils = rootReq('src/lib/utils');
var clientManager = require('./client_manager');
var port = parseInt(process.env.ES_PORT || 9200, 10);
var host = process.env.ES_HOST || 'localhost';
var _release = branch.match(/^v(\d+\.\d+)\.\d+$/);
var apiVersion = _release ? _release[1] : branch;
console.log(' branch:', branch);
console.log(' port:', port);
console.log(' api version:', apiVersion);
describe('integration', function() {
this.timeout(30000);
// before running any tests...
before(function(done) {
this.timeout(5 * 60 * 1000);
clientManager.create(apiVersion, port, host, done);
});
before(function() {
// make sure ES is empty
return clientManager.get().clearEs();
});
_.map(
require('./yaml_tests_' + utils.snakeCase(branch) + '.json'),
function(docs, filename) {
return new YamlFile(filename, docs);
}
);
});
};

View File

@ -1,781 +0,0 @@
/* eslint-disable no-console */
/**
* Class to wrap a single document from a yaml test file
*
* @constructor
* @class YamlDoc
* @param actions {Array} - The array of actions directly from the Yaml file
*/
module.exports = YamlDoc;
var _ = require('lodash');
var expect = require('expect.js');
var clientManager = require('./client_manager');
var inspect = require('util').inspect;
// yaml-test "features" this runner knows how to execute; docs requiring
// anything else are skipped (see do_skip)
var implementedFeatures = [
  'gtelte',
  'regex',
  'benchmark',
  'stash_in_path',
  'groovy_scripting',
];
/**
 * The version that ES is running, in comparable string form XXX-XXX-XXX, fetched when needed
 * @type {String}
 */
var ES_VERSION = null;
// core expression for finding a version; the trailing "|" also lets the
// expression match the empty string, so unbounded range endpoints
// (e.g. "1.4 - ") are accepted by versionRangeRE below
var versionExp = '((?:\\d+\\.){0,2}\\d+)(?:[\\.\\-]\\w+)?|';
// match all whitespace within a "regexp" match arg
var reWhitespaceRE = /\s+/g;
// match all comments within a "regexp" match arg
var reCommentsRE = /([\S\s]?)#[^\n]*\n/g;
/**
 * Regular Expression to extract a version number from a string
 * @type {RegExp}
 */
var versionRE = new RegExp('^(?:' + versionExp + ')$');
/**
 * Regular Expression to extract a version range from a string
 * @type {RegExp}
 */
var versionRangeRE = new RegExp(
  '^(?:' + versionExp + ')\\s*\\-\\s*(?:' + versionExp + ')$'
);
/**
 * Fetches the client.info, and parses out the version number to a comparable
 * string (cached in the module-level ES_VERSION).
 * @param done {Function} - callback, invoked with no arguments on success
 */
function getVersionFromES(done) {
  clientManager.get().info({}, function(err, resp) {
    if (err) {
      // NOTE(review): thrown from inside an async callback, so callers cannot
      // catch this -- it takes down the test process. Confirm that is intended.
      throw new Error('unable to get info about ES');
    }
    ES_VERSION = resp.version.number;
    done();
  });
}
/**
 * Transform x.x.x into xxx-xxx-xxx, stripping off any text at the end like
 * beta or pre-alpha35.
 *
 * @param {String} version - Version number represented as a string
 * @param {*} def - Value returned unchanged when `version` is falsy (used by
 *   callers to represent unbounded range endpoints, e.g. -Infinity/Infinity)
 * @return {String|*} - Version number represented as three numbers, separated
 *   by -, each zero-padded to three characters so the strings can be compared
 *   lexicographically; or `def` when no version was given.
 */
function versionToComparableString(version, def) {
  if (!version) {
    return def;
  }
  // native Array#map/parseInt -- lodash is unnecessary for this
  var parts = version.split('.').map(function(part) {
    // parseInt also drops trailing suffixes like "4-beta1" -> 4
    part = String(parseInt(part, 10));
    // left-pad with zeros to a width of three characters
    return new Array(Math.max(4 - part.length, 0)).join('0') + part;
  });
  while (parts.length < 3) {
    parts.push('000');
  }
  return parts.join('-');
}
/**
 * Compare a version range to the ES_VERSION, determining if the current version
 * falls within the range. Lazily fetches the version from ES the first time it
 * is needed, then retries the comparison.
 *
 * @param {String} rangeString - a string representing two version numbers
 *   separated by a "-", or the literal "all" which always matches
 * @param {Function} done - receives a single Boolean: is the current version
 *   within the range (inclusive)
 */
function rangeMatchesCurrentVersion(rangeString, done) {
  if (rangeString === 'all') {
    return done(true);
  }
  if (!ES_VERSION) {
    // fetch and cache the version, then run this same comparison again
    getVersionFromES(function() {
      rangeMatchesCurrentVersion(rangeString, done);
    });
    return;
  }
  done(YamlDoc.compareRangeToVersion(rangeString, ES_VERSION));
}
/**
 * Wrap a single yaml test doc: flatten its actions and build a mocha/async
 * compatible runner for each one.
 *
 * @param {Object} doc - object with a single key (the doc's description)
 *   whose value is the list of actions
 * @param {YamlFile} file - the file this doc belongs to (carries the
 *   file-level skipping flag)
 */
function YamlDoc(doc, file) {
  var self = this;
  self.file = file;
  // the doc object has exactly one key: its description
  self.description = _.keys(doc).shift();
  // values stashed by "set" actions, referenced later via $name
  self._stash = {};
  self._last_requests_response = null;
  // setup the actions, creating a bound and testable method for each
  self._actions = _.map(
    self.flattenTestActions(doc[self.description]),
    function(action) {
      // get the method that will do the action
      var method = self['do_' + action.name];
      // check that it's a function
      expect(method || 'YamlDoc#' + action.name).to.be.a('function');
      // include the args in the action name for log/error readability
      if (_.isPlainObject(action.args)) {
        action.name += '(' + JSON.stringify(action.args) + ')';
      } else if (action.args) {
        action.name += '(' + action.args + ')';
      }
      // wrap in a check for skipping
      action.bound = _.bind(method, self, action.args);
      // create a function that can be passed to mocha or async
      action.testable = function(_cb) {
        function done(err) {
          process.nextTick(function() {
            if (err) {
              err.message += ' in ' + action.name;
            }
            _cb(err);
          });
        }
        if (self.skipping || self.file.skipping) {
          return done();
        }
        // do_* methods declaring a second parameter are async and get `done`;
        // the rest are run synchronously inside a try/catch
        if (method.length > 1) {
          action.bound(done);
        } else {
          try {
            action.bound();
            process.nextTick(done);
          } catch (err) {
            done(err);
          }
        }
      };
      return action;
    }
  );
  // returns one async-style runner per action, each logging a banner first
  self.getActionsRunners = function() {
    return self._actions.map(function(action) {
      return function(cb) {
        clientManager
          .get()
          .transport.log.debug(
            '===========================\n' +
              action.name +
              '\n==========================='
          );
        return action.testable(cb);
      };
    });
  };
}
/**
 * Check whether `version` falls inside `range` (inclusive). Missing range
 * endpoints are treated as unbounded.
 *
 * @param {String} range - two version numbers separated by "-"
 * @param {String} version - the version to test
 * @return {Boolean}
 */
YamlDoc.compareRangeToVersion = function(range, version) {
  expect(range).to.match(versionRangeRE);
  var bounds = versionRangeRE.exec(range);
  expect(version).to.match(versionRE);
  var current = versionRE.exec(version);
  var low = versionToComparableString(bounds[1], -Infinity);
  var high = versionToComparableString(bounds[2], Infinity);
  var val = versionToComparableString(current[1], Infinity);
  var aboveMin = low === -Infinity || low <= val;
  var belowMax = high === Infinity || high >= val;
  return aboveMin && belowMax;
};
YamlDoc.prototype = {
/**
* convert tests actions
* from: [ {name:args, name:args}, {name:args}, ... ]
* to: [ {name:'', args:'' }, {name:'', args:''} ]
* so it's easier to work with
* @param {ArrayOfObjects} config - Actions to be taken as defined in the yaml specs
*/
flattenTestActions: function(config) {
// creates [ [ {name:"", args:"" }, ... ], ... ]
// from [ {name:args, name:args}, {name:args} ]
var actionSets = _.map(config, function(set) {
return _.map(_.toPairs(set), function(pair) {
return { name: pair[0], args: pair[1] };
});
});
// do a single level flatten, merge=ing the nested arrays from step one
// into a master array, creating an array of action objects
return _.reduce(
actionSets,
function(note, set) {
return note.concat(set);
},
[]
);
},
/**
* Itterate over each of the actions, provides the testable function, and a name/description.
* return a litteral false to stop itterating
* @param {Function} ittr - The function to call for each action.
* @return {undefined}
*/
each: function(ittr) {
for (var i = 0; i < this._actions.length; i++) {
if (ittr(this._actions[i].testable, this._actions[i].name) === false) {
break;
}
}
},
  /**
   * Get a value from the last response, using dot-notation
   *
   * Example
   * ===
   *
   * get '_source.tags.1'
   *
   * from {
   *   _source: {
   *     tags: [
   *       'one',
   *       'two'
   *     ]
   *   }
   * }
   *
   * returns 'two'
   *
   * Also supports "$name" stash references inside the path, "\." escaped dots,
   * and the special path "$body" which returns the entire previous response.
   *
   * @param {string} path - The dot-notation path to the value needed.
   * @param {Object} [from] - Object to read from; defaults to the stash (for
   *   paths starting with "$") or the last response.
   * @return {*} - The value requested, or undefined if it was not found
   */
  get: function(path, from) {
    var self = this;
    var log =
      process.env.LOG_GETS && !from ? console.log.bind(console) : function() {};
    var i;
    if (path === '$body') {
      // shortcut, the test just wants the whole body
      return self._last_requests_response;
    } else if (path) {
      // substitute stashed values referenced inside the path, e.g. "a.$id.b"
      path = path.replace(/\.\$([a-zA-Z0-9_]+)/g, function(m, name) {
        return '.' + self._stash[name];
      });
    }
    if (!from) {
      if (path[0] === '$') {
        from = self._stash;
        path = path.substring(1);
      } else {
        from = self._last_requests_response;
      }
    }
    log('getting', path, 'from', from);
    // split on unescaped dots; "\uffff" temporarily stands in for escaped dots
    var steps = _.map(
      path ? path.replace(/\\\./g, '\uffff').split('.') : [],
      function(step) {
        return step.replace(/\uffff/g, '.');
      }
    );
    var remainingSteps;
    for (i = 0; from != null && i < steps.length; i++) {
      if (from[steps[i]] === void 0) {
        // nothing at this step: fall back to treating the remainder of the
        // path as a single literal key (keys may themselves contain dots)
        remainingSteps = steps
          .slice(i)
          .join('.')
          .replace(/\\\./g, '.');
        from = from[remainingSteps];
        break;
      } else {
        from = from[steps[i]];
      }
    }
    log('found', typeof from !== 'function' ? from : 'function');
    return from;
  },
/**
* Do a skip operation, setting the skipping flag to true if the version matches
* the range defined in args.version
*
* @param args
* @param done
*/
do_skip: function(args, done) {
if (args.version) {
return rangeMatchesCurrentVersion(
args.version,
_.bind(function(match) {
if (match) {
if (this.description === 'setup') {
this.file.skipping = true;
// console.log('skipping this file' + (args.reason ? ' because ' + args.reason : ''));
} else {
this.skipping = true;
// console.log('skipping the rest of this doc' + (args.reason ? ' because ' + args.reason : ''));
}
} else {
this.skipping = false;
this.file.skipping = false;
}
done();
}, this)
);
}
if (args.features) {
var features = Array.isArray(args.features)
? args.features
: [args.features];
var notImplemented = _.difference(features, implementedFeatures);
if (notImplemented.length) {
if (this.description === 'setup') {
this.file.skipping = true;
console.log(
'skipping this file because ' +
notImplemented.join(' & ') +
' are not implemented'
);
} else {
this.skipping = true;
console.log(
'skipping the rest of this doc because ' +
notImplemented.join(' & ') +
' are not implemented'
);
}
}
return done();
}
},
  /**
   * Do a request, as outlined in the args.
   *
   * `args` contains exactly one action key (the api method to call, e.g.
   * "indices.create", mapped to its params) plus optionally `catch` (the
   * expected error) and `headers`.
   *
   * @param {Object} args - the "do" action body from the yaml test
   * @param {Function} done - async callback
   * @return {undefined}
   */
  do_do: function(args, done) {
    var catcher;
    if (process.env.LOG_DO) {
      var __done = done;
      done = function(err, resp) {
        console.log('doing', clientActionName, 'with', params);
        console.log('got', resp);
        __done(err, resp);
      };
    }
    // resolve the catch arg to a value used for matching once the request is complete
    switch (args.catch) {
      case void 0:
        catcher = null;
        break;
      case 'missing':
        catcher = 404;
        break;
      case 'conflict':
        catcher = 409;
        break;
      case 'forbidden':
        catcher = 403;
        break;
      case 'request_timeout':
        catcher = 408;
        break;
      case 'request':
        catcher = /.*/;
        break;
      case 'param':
        catcher = TypeError;
        break;
      default:
        // "/.../" strings become a RegExp the error message must match
        catcher = args.catch.match(/^\/(.*)\/$/);
        if (catcher) {
          catcher = new RegExp(catcher[1]);
        } else {
          return done(new TypeError('unsupported catch type ' + args.catch));
        }
    }
    delete args.catch;
    var inputParams = {};
    // resolve the headers for a request
    if (args.headers) {
      inputParams.headers = args.headers;
      delete args.headers;
    }
    // after removing catch/headers, exactly one key (the action) may remain
    var otherKeys = _.keys(args);
    var action = otherKeys.shift();
    if (otherKeys.length) {
      return done(
        new TypeError(
          'Unexpected top-level args to "do": ' + otherKeys.join(', ')
        )
      );
    }
    var client = clientManager.get();
    // "indices.create_index" -> "indices.createIndex": camelCase each segment
    var clientActionName = _.map(action.split('.'), _.camelCase).join('.');
    var clientAction = this.get(clientActionName, client);
    _.assign(inputParams, args[action]);
    var params = _.transform(
      inputParams,
      _.bind(function(params, val, name) {
        var camelName = _.camelCase(name);
        // search through the params and url pieces to find this param name
        var paramName = name;
        var spec = clientAction && clientAction.spec;
        var knownParam = spec && spec.params && spec.params[camelName];
        var knownUrlParam =
          spec &&
          !knownParam &&
          !!_.find(spec.url ? [spec.url] : spec.urls, function(url) {
            if (
              (url.opt && url.opt[camelName]) ||
              (url.req && url.req[camelName])
            ) {
              return true;
            }
          });
        // if we do know this param name, use the camelCase version
        if (knownParam || knownUrlParam) {
          paramName = camelName;
        }
        // for recursively traversing the params to replace '$stashed' vars
        var transformObject = function(vals, val, i) {
          if (_.isString(val)) {
            val = val[0] === '$' ? this.get(val) : val;
          } else if (_.isPlainObject(val) || _.isArray(val)) {
            val = _.transform(val, transformObject);
          }
          vals[i] = val;
        }.bind(this);
        transformObject(params, val, paramName);
      }, this),
      {}
    );
    expect(clientAction || clientActionName).to.be.a('function');
    // numeric catchers (status codes) are handled via the client's `ignore`
    // option instead of by matching the resulting error
    if (!isNaN(parseFloat(catcher))) {
      params.ignore = _.union(params.ignore || [], [catcher]);
      catcher = null;
    }
    var timeoutId;
    var cb = _.bind(function(error, body) {
      this._last_requests_response = body;
      clearTimeout(timeoutId);
      if (error) {
        if (catcher) {
          if (catcher instanceof RegExp) {
            // error message should match the regexp
            expect('' + error).to.match(catcher);
            error = null;
          } else if (typeof catcher === 'function') {
            // error should be an instance of
            expect(error).to.be.a(catcher);
            error = null;
          } else {
            return done(new Error('Invalid catcher ' + catcher));
          }
        } else {
          return done(error);
        }
      }
      done(error);
    }, this);
    var req = clientAction.call(client, params, cb);
    timeoutId = setTimeout(
      function() {
        // request timed out, so we will skip the rest of the tests and continue
        req.abort();
        this.skipping = true;
        this._last_requests_response = {};
        done();
      }.bind(this),
      20000
    );
  },
/**
* Set a value from the respose into the stash
*
* Example
* ====
* { _id: id } # stash the value of `response._id` as `id`
*
* @param {Object} args - The object set to the "set" key in the test
* @return {undefined}
*/
do_set: function(args) {
_.forOwn(
args,
_.bind(function(name, path) {
this._stash[name] = this.get(path);
}, this)
);
},
/**
* Test that the specified path exists in the response and has a
* true value (eg. not 0, false, undefined, null or the empty string)
*
* @param {string} path - Path to the response value to test
* @return {undefined}
*/
do_is_true: function(path) {
var val = this.get(path);
try {
expect(Boolean(val)).to.be(true, 'path: ' + path);
} catch (e) {
throw new Error('expected path "' + path + '" to be true but got ' + val);
}
},
/**
* Test that the specified path exists in the response and has a
* false value (eg. 0, false, undefined, null or the empty string)
*
* @param {string} path - Path to the response value to test
* @return {undefined}
*/
do_is_false: function(path) {
var val = this.get(path);
try {
expect(Boolean(val)).to.be(false, 'path: ' + path);
} catch (e) {
throw new Error(
'expected path "' + path + '" to be false but got ' + val
);
}
},
  /**
   * Test that the response field (arg key) matches the value specified.
   *
   * @param {Object} args - Args can be specified in a number of formats:
   *
   *   object{ <path>: <string|number|obj> }
   *     - used to match simple values against properties of the last response body
   *     - keys are "paths" to values in the previous response
   *     - values are what they should match
   *     example:
   *       resp:
   *        {
   *          hits: {
   *            total: 100,
   *            hits: [ ... ]
   *          }
   *        }
   *       args:
   *        {
   *          "hits.total": 100,
   *        }
   *
   *
   *   object{ <path>: <RegExp> }
   *     - regexp is expressed as a string that starts and ends with a /
   *     - we have to make several replacements on the string before converting
   *       it into a regexp because javascript doesn't support the "verbose"
   *       mode they are written for.
   *
   * @return {undefined}
   */
  do_match: function(args) {
    var self = this;
    // recursively replace all $var within args with their stashed values
    _.forOwn(args, function recurse(val, key, lvl) {
      if (_.isObject(val)) {
        return _.each(val, recurse);
      }
      if (_.isString(val)) {
        lvl[key] = val.replace(/\$[a-zA-Z0-9_]+/g, function(name) {
          return self.get(name);
        });
      }
    });
    _.forOwn(
      args,
      _.bind(function(match, path) {
        var origMatch = match;
        var maybeRE = false;
        var usedRE = false;
        if (_.isString(match)) {
          // convert the matcher into a compatible string for building a regexp
          maybeRE = match
            // replace comments, but allow the # to be escaped like \#
            .replace(reCommentsRE, function(match, prevChar) {
              if (prevChar === '\\') {
                return match;
              } else {
                return prevChar + '\n';
              }
            })
            // remove all whitespace from the expression, all meaningful
            // whitespace is represented with \s
            .replace(reWhitespaceRE, '');
          var startsWithSlash = maybeRE[0] === '/';
          var endsWithSlash = maybeRE[maybeRE.length - 1] === '/';
          if (startsWithSlash && endsWithSlash) {
            usedRE = true;
            match = new RegExp(maybeRE.substr(1, maybeRE.length - 2));
          }
        }
        var val = this.get(path);
        var test = 'eql';
        if (match instanceof RegExp) {
          test = 'match';
          // convert falsy values to an empty string so that regexp doesn't
          // cast them to the strings "false", "undefined", etc.
          val = val || '';
        }
        try {
          expect(val).to[test](match);
        } catch (e) {
          // build a detailed, multi-line failure message
          var msg = [
            '\nUnable to match',
            inspect(match),
            'with the path',
            inspect(path),
            'and value',
            inspect(val),
          ];
          if (usedRE) {
            msg.push('and original matcher', '|' + origMatch);
          }
          msg.push('original error', e.message);
          throw new Error(msg.join('\n'));
        }
      }, this)
    );
  },
/**
* Test that the response field (arg key) is less than the value specified
*
* @param {Object} args - Hash of fields->values that need to be checked
* @return {undefined}
*/
do_lt: function(args) {
_.forOwn(
args,
_.bind(function(num, path) {
expect(this.get(path)).to.be.below(num, 'path: ' + path);
}, this)
);
},
/**
* Test that the response field (arg key) is less than the value specified
*
* @param {Object} args - Hash of fields->values that need to be checked
* @return {undefined}
*/
do_lte: function(args) {
_.forOwn(
args,
_.bind(function(num, path) {
expect(this.get(path) <= num).to.be.ok('path: ' + path);
}, this)
);
},
/**
* Test that the response field (arg key) is greater than the value specified
*
* @param {Object} args - Hash of fields->values that need to be checked
* @return {undefined}
*/
do_gt: function(args) {
_.forOwn(
args,
_.bind(function(num, path) {
expect(this.get(path)).to.be.above(num, 'path: ' + path);
}, this)
);
},
/**
* Test that the response field (arg key) is greater than the value specified
*
* @param {Object} args - Hash of fields->values that need to be checked
* @return {undefined}
*/
do_gte: function(args) {
_.forOwn(
args,
_.bind(function(num, path) {
expect(this.get(path) >= num).to.be.ok('path: ' + path);
}, this)
);
},
/**
* Test that the response field (arg key) has a length equal to that specified.
* For object values, checks the length of the keys.
*
* @param {Object} args - Hash of fields->values that need to be checked
* @return {undefined}
*/
do_length: function(args) {
_.forOwn(
args,
_.bind(function(len, path) {
expect(_.size(this.get(path))).to.eql(len, 'path: ' + path);
}, this)
);
},
};

View File

@ -1,49 +0,0 @@
/* eslint-env mocha */
/* eslint-disable no-console */
/**
* Class representing a YAML file
* @type {[type]}
*/
module.exports = YamlFile;
var YamlDoc = require('./yaml_doc');
var clientManager = require('./client_manager');
var _ = require('lodash');
var async = require('async');
/**
 * Register a describe() block for one yaml test file: its "setup" doc becomes
 * a beforeEach, every other doc becomes an it(), and the cluster is wiped
 * after each test.
 *
 * @param {String} filename - yaml file name, used as the suite title
 * @param {Array} docs - raw docs parsed from the yaml file
 */
function YamlFile(filename, docs) {
  var file = this;
  // file level skipping flag (set by a "skip" action in the setup doc)
  file.skipping = false;
  describe(filename, function() {
    file.docs = _.map(docs, function(doc) {
      doc = new YamlDoc(doc, file);
      if (doc.description === 'setup') {
        // the setup doc's actions run before every test in this file
        beforeEach(
          /* doc */ function(done) {
            async.series(doc.getActionsRunners(), done);
          }
        );
      } else {
        it(doc.description, function(done) {
          async.series(doc.getActionsRunners(), done);
        });
      }
    });
    // wipe the cluster after each test so tests stay independent
    afterEach(
      /* doc */ function() {
        clientManager
          .get()
          .transport.log.debug(
            '===========================\n' +
              'Cleanup\n' +
              '==========================='
          );
        return clientManager.get().clearEs();
      }
    );
  });
}

View File

@ -1,22 +0,0 @@
// Unit tests for YamlDoc.compareRangeToVersion, the version-range comparison
// used to decide whether yaml suite docs apply to the ES version under test.
describe('Yaml Test Reader', function() {
  var YamlDoc = require('../../integration/yaml_suite/yaml_doc');
  var compare = YamlDoc.compareRangeToVersion;
  var expect = require('expect.js');
  describe('version range comparison', function() {
    it('supports unbounded ranges', function() {
      // a missing endpoint means -Infinity (left) or Infinity (right)
      expect(compare(' - ', '999999999999999999')).to.be(true);
      expect(compare('0 - ', '999999999999999999')).to.be(true);
      expect(compare(' - 1', '999999999999999999')).to.be(false);
    });
    it('supports bound ranges', function() {
      // endpoints are inclusive
      expect(compare('1.4 - 1.5', '1.4.4')).to.be(true);
      expect(compare('1.4.4 - 1.5', '1.4.4')).to.be(true);
      expect(compare('1.4 - 1.4.4', '1.4.4')).to.be(true);
      expect(compare('1.4 - 1.4.3', '1.4.4')).to.be(false);
      expect(compare('0.90 - 1.2', '1.0')).to.be(true);
      expect(compare('0.90 - 1.2', '1.4')).to.be(false);
    });
  });
});

View File

@ -1,220 +0,0 @@
/**
 * ESJS reporter for running and collecting mocha test results.
 *
 * @param {Runner} runner
 * @api public
 */
module.exports = JenkinsReporter;
var Base = require('mocha/lib/reporters/base');
var _ = require('lodash');
var chalk = require('chalk');
var makeJUnitXml = require('./make_j_unit_xml');
var fs = require('fs');
var path = require('path');
var inspect = require('util').inspect;
// direct, bound handle on stdout.write so progress dots still reach the
// terminal after stdout/stderr are hijacked by the reporter below
var log = (function() {
  var locked = _.bind(process.stdout.write, process.stdout);
  return function(str) {
    if (typeof str !== 'string') {
      str = inspect(str);
    }
    locked(str);
  };
})();
// decide which junit file to write based on the suite path passed to mocha
var integration = _.find(process.argv, function(arg) {
  return arg.indexOf('test/integration') > -1;
});
var unit = _.find(process.argv, function(arg) {
  return arg.indexOf('test/unit') > -1;
});
var output;
if (unit) {
  output = path.join(__dirname, '../junit-node-unit.xml');
} else if (integration) {
  output = path.join(__dirname, '../junit-node-integration.xml');
} else {
  throw new Error('unable to detect unit or integration tests');
}
/**
 * Mocha reporter that prints compact progress dots and writes a JUnit xml
 * report (via ./make_j_unit_xml) when the run completes. stdout/stderr are
 * captured per-suite so their output can be embedded in the xml.
 *
 * @param {Runner} runner - mocha test runner
 */
function JenkinsReporter(runner) {
  Base.call(this, runner);
  var stats = this.stats;
  var rootSuite = {
    results: [],
    suites: [],
  };
  // stack of currently-open suites; stack[0] is the innermost
  var stack = [rootSuite];
  function indt() {
    return new Array(stack.length + 1).join(' ');
  }
  runner.on('suite', function(suite) {
    if (suite.root) {
      return;
    }
    // suite
    suite = {
      name: suite.fullTitle(),
      results: [],
      start: Date.now(),
      stdout: '',
      stderr: '',
    };
    // append to the previous stack leader
    if (!stack[0].suites) {
      stack[0].suites = [];
    }
    stack[0].suites.push(suite);
    // push the suite onto the top of the stack
    stack.unshift(suite);
  });
  runner.on('suite end', function(suite) {
    if (suite.root) {
      return;
    }
    stack[0].time = Date.now() - stack[0].start;
    stack.shift();
  });
  runner.on('fail', function(test) {
    if (test.type === 'hook') {
      // record failed hooks as if they were tests
      runner.emit('test end', test);
    }
  });
  runner.on('test end', function(test) {
    if (test.state === 'passed') {
      log(chalk.green('.'));
    } else if (test.pending) {
      log(chalk.grey('.'));
      return;
    } else {
      log(chalk.red('x'));
    }
    var errMsg = void 0;
    if (test.err) {
      errMsg = test.err.stack || test.err.toString();
      // FF / Opera do not add the message
      if (!~errMsg.indexOf(test.err.message)) {
        errMsg = test.err.message + '\n' + errMsg;
      }
      // <=IE7 stringifies to [Object Error]. Since it can be overloaded, we
      // check for the result of the stringifying.
      if (errMsg === '[object Error]') {
        errMsg = test.err.message;
      }
      // Safari doesn't give you a stack. Let's at least provide a source line.
      if (
        !test.err.stack &&
        test.err.sourceURL &&
        test.err.line !== undefined
      ) {
        errMsg += '\n(' + test.err.sourceURL + ':' + test.err.line + ')';
      }
      console.error(
        _.map(errMsg.split('\n'), function(line) {
          return indt() + ' ' + line;
        }).join('\n')
      );
    }
    if (stack[0]) {
      stack[0].results.push({
        name: test.title,
        time: test.duration,
        pass: test.state === 'passed',
        // fix: store the formatted error so make_j_unit_xml can emit a
        // <failure> with the real message instead of "Unknown Error"
        errMsg: errMsg,
        test: test,
        stdout: stack[0].stdout,
        stderr: stack[0].stderr,
      });
      stack[0].stdout = stack[0].stderr = '';
    }
  });
  runner.on('hook end', function(hook) {
    // attribute output produced during "after each" hooks to the test that
    // just finished
    if (
      hook.title.indexOf('"after each"') > -1 &&
      stack[0] &&
      stack[0].results.length
    ) {
      var result = _.last(stack[0].results);
      result.stdout += stack[0].stdout;
      result.stderr += stack[0].stderr;
      stack[0].stdout = stack[0].stderr = '';
    }
  });
  runner.on('end', function() {
    restoreStdio();
    var xml = makeJUnitXml('node ' + process.version, {
      stats: stats,
      suites: _.map(rootSuite.suites, function removeElements(suite) {
        var s = {
          name: suite.name,
          start: suite.start,
          time: suite.time || 0,
          results: suite.results,
          stdout: suite.stdout,
          stderr: suite.stderr,
        };
        if (suite.suites) {
          s.suites = _.map(suite.suites, removeElements);
        }
        return s;
      }),
    });
    fs.writeFileSync(output, xml, 'utf8');
    console.log(
      '\n' +
        [
          'tests complete in ' +
            Math.round(stats.duration / 10) / 100 +
            ' seconds',
          ' fail: ' + chalk.red(stats.failures),
          ' pass: ' + chalk.green(stats.passes),
          ' pending: ' + chalk.grey(stats.pending),
        ].join('\n')
    );
  });
  // overload the write methods on stdout and stderr so all test output is
  // captured into the current suite instead of hitting the terminal
  ['stdout', 'stderr'].forEach(function(name) {
    var obj = process[name];
    var orig = obj.write;
    obj.write = function(chunk) {
      if (stack[0]) {
        stack[0][name] = (stack[0][name] || '') + chunk;
      }
      // orig.apply(obj, arguments);
    };
    obj.__restore = function() {
      this.write = orig;
    };
  });
  function restoreStdio() {
    process.stdout.__restore();
    process.stderr.__restore();
  }
}

View File

@ -1,110 +0,0 @@
/**
* The JUnit xml output desired by Jenkins essentially looks like this:
*
* testsuites:
* - testsuite: (name, timestamp, hostname, tests, failures, errors, time)
* - testcase: (error or failure, name, classname, time)
*
* Full XSD avaliable [here](http://windyroad.com.au/dl/Open%20Source/JUnit.xsd)
*
* from
*
* {
* stats: {
*
* }
* suite: [
* {
* name:
* results: []
* suites: [] // optional
* }
* ]
* }
*/
module.exports = makeJUnitXml;
var testXml = require('xmlbuilder');
// NOTE(review): `suites` and `suiteCount` live at module scope, so calling
// makeJUnitXml() more than once per process accumulates suites into the same
// xml document -- confirm that is intended.
var suites = testXml.create('testsuites');
var suiteCount = 0;
var moment = require('moment');
var _ = require('lodash');
var chalk = require('chalk');
/**
 * Serialize collected test results into JUnit xml (see the file header for
 * the input/output shape).
 *
 * @param {String} runnerName - label used to build the testcase classname
 * @param {Object} testDetails - { stats, suites } as built by the reporter
 * @return {String} pretty-printed xml
 */
function makeJUnitXml(runnerName, testDetails) {
  _.each(testDetails.suites, function serializeSuite(suiteInfo) {
    var suite = suites.ele('testsuite', {
      package: 'elasticsearch-js',
      id: suiteCount++,
      name: suiteInfo.name,
      timestamp: moment(suiteInfo.start).toJSON(),
      hostname: 'localhost',
      tests: (suiteInfo.results && suiteInfo.results.length) || 0,
      failures: _.filter(suiteInfo.results, { pass: false }).length,
      errors: 0,
      time: suiteInfo.time / 1000,
    });
    _.each(suiteInfo.results, function(testInfo) {
      var section;
      var integration = false;
      if (suiteInfo.name.match(/\/.*\.yaml$/)) {
        // yaml suite names look like paths; use the directory as the section
        section = suiteInfo.name
          .split('/')
          .slice(0, -1)
          .join('/')
          .replace(/\./g, '/');
      } else {
        section = suiteInfo.name.replace(/\./g, ',');
      }
      if (section.indexOf('integration ') === 0) {
        section = section.replace(/^integration /, '');
        integration = true;
      }
      var testcase = suite.ele('testcase', {
        name: testInfo.name,
        time: (testInfo.time || 0) / 1000,
        classname:
          runnerName + (integration ? ' - integration' : '') + '.' + section,
      });
      if (testInfo.errMsg) {
        testcase.ele('failure', {
          message: testInfo.errMsg,
          type: 'AssertError',
        });
      } else if (!testInfo.pass) {
        // failed but no error message was captured
        testcase.ele('error', {
          message: 'Unknown Error',
          type: 'TestError',
        });
      }
      giveOutput(testcase, testInfo);
    });
    if (suiteInfo.suites) {
      // nested suites are serialized as flat sibling <testsuite> elements
      _.each(suiteInfo.suites, serializeSuite);
    }
    giveOutput(suite, suiteInfo);
  });
  return suites.toString({ pretty: true });
}
// attach captured stdout/stderr (color codes stripped) to an xml element
function giveOutput(el, info) {
  var stdout = info.stdout.trim();
  var stderr = info.stderr.trim();
  if (stdout) {
    el.ele('system-out', {}).cdata(chalk.stripColor(stdout));
  }
  if (stderr) {
    el.ele('system-err', {}).cdata(chalk.stripColor(stderr));
  }
}

View File

@ -1,137 +0,0 @@
/* eslint-disable import/no-unresolved */
var express = require('express');
var http = require('http');
var fs = require('fs');
var _ = require('lodash');
var async = require('async');
var root = require('path').join(__dirname, '../..');
var browserify = require('browserify');
var pkg = require(root + '/package.json');
var unitSpecDir = root + '/test/unit/specs';
var browserBuildsDir = root + '/test/unit/browser_builds';
// file lists served by the /unit_tests.js and /build_tests.js bundle routes
var testFiles = {};
// unit specs, minus the node-only ones that cannot run in a browser
testFiles.unit = _(fs.readdirSync(unitSpecDir))
  .difference([
    'file_logger.js',
    'http_connector.js',
    'stdio_logger.js',
    'console_logger.js',
    'stream_logger.js',
    'tracer_logger.js',
    'transport_with_server.js',
  ])
  .map(function(file) {
    return unitSpecDir + '/' + file;
  })
  .value();
// only the .js files from the browser_builds directory
testFiles.build = fs
  .readdirSync(browserBuildsDir)
  .map(function(file) {
    if (file.substr(-3) === '.js') {
      return browserBuildsDir + '/' + file;
    }
    return null;
  })
  .filter(Boolean);
// generic aliasify instance
var aliasify = require('aliasify').configure({
  aliases: pkg.browser,
  excludeExtensions: 'json',
  // verbose: false,
  configDir: root,
});
// queue for bundle requests, two at a time
var bundleQueue = async.queue(function(task, done) {
  task(done);
}, 2);
/**
 * Create a route that browserifies a file list (per the patterns defined in
 * testFiles) and streams the bundle to the response. Bundling jobs go
 * through bundleQueue, which limits concurrency to two.
 *
 * @param {String} name - key into testFiles ('unit' or 'build')
 * @return {Function} express route handler
 */
function bundleTests(name) {
  return function(req, res, next) {
    bundleQueue.push(function(_cb) {
      // fix: always release the queue slot, exactly once. Previously the
      // error path called next(err) without calling _cb, permanently
      // consuming one of the queue's two concurrency slots per error.
      var finished = false;
      var done = function(err) {
        if (finished) {
          return;
        }
        finished = true;
        _cb();
        if (err) {
          next(err);
        }
      };
      res.set('Content-Type', 'application/javascript');
      var b = browserify(testFiles[name], {
        insertGlobals: true,
      });
      b.transform(aliasify);
      var str = b.bundle();
      str.pipe(res);
      str.once('end', done);
      str.once('error', done);
    });
  };
}
// create a route that just sends a specific file (like a symlink or something)
function sendFile(filePath) {
  return function(req, res) {
    res.sendfile(filePath);
  };
}
// express application serving the browser test runners, their support files,
// and the browserified test bundles. NOTE(review): app.use(app.router) is
// express 3.x API, removed in express 4.
var app = express();
app
  .use(app.router)
  // runners
  .get('/unit.html', sendFile(root + '/test/browser_unit_tests.html'))
  .get('/builds.html', sendFile(root + '/test/browser_build_unit_tests.html'))
  // support
  .get('/expect.js', sendFile(root + '/node_modules/expect.js/index.js'))
  .get('/mocha.css', sendFile(root + '/node_modules/mocha/mocha.css'))
  .get('/mocha.js', sendFile(root + '/node_modules/mocha/mocha.js'))
  .get(
    '/screencast-reporter.css',
    sendFile(
      root + '/node_modules/mocha-screencast-reporter/screencast-reporter.css'
    )
  )
  .get(
    '/screencast-reporter.js',
    sendFile(
      root + '/node_modules/mocha-screencast-reporter/screencast-reporter.js'
    )
  )
  // libs
  .get('/angular.js', sendFile(root + '/bower_components/angular/angular.js'))
  .get(
    '/angular-mocks.js',
    sendFile(root + '/bower_components/angular-mocks/angular-mocks.js')
  )
  .get('/jquery.js', sendFile(root + '/node_modules/jquery/dist/jquery.js'))
  // builds
  .get('/elasticsearch.js', sendFile(root + '/dist/elasticsearch.js'))
  .get(
    '/elasticsearch.angular.js',
    sendFile(root + '/dist/elasticsearch.angular.js')
  )
  .get(
    '/elasticsearch.jquery.js',
    sendFile(root + '/dist/elasticsearch.jquery.js')
  )
  // bundles
  .get('/unit_tests.js', bundleTests('unit'))
  .get('/build_tests.js', bundleTests('build'));
http.createServer(app).listen(8000, function() {
  console.log('listening on port 8000');
});

View File

@ -1,8 +0,0 @@
// I know this is horrible
// I just don't want the keys searchable on github
module.exports = JSON.parse(
new Buffer(
'eyJ1c2VyIjoiZWxhc3RpY3NlYXJjaC1qcyIsImtleSI6IjI0ZjQ5ZTA3LWQ4MmYtNDA2Ny04NTRlLWQ4MTVlYmQxNWU0NiJ9',
'base64'
).toString('utf8')
);