Spencer Alger
2015-01-06 07:08:17 -07:00
parent 4fc1c91634
commit 93ea7a3fac
13 changed files with 439 additions and 376 deletions

View File

@@ -5,8 +5,8 @@ env:
- ES_BRANCH=1.4 ES_RELEASE=1.4.2 COVERAGE=1
- ES_BRANCH=1.3 ES_RELEASE=1.3.7 NODE_UNIT=0
- ES_BRANCH=1.2 ES_RELEASE=1.2.4 NODE_UNIT=0
- NODE_UNIT=0 NODE_INTEGRATION=0 BROWSER_UNIT=1
script: ./scripts/ci.sh
- ES_BRANCH=1.4 NODE_UNIT=0 NODE_INTEGRATION=0 BROWSER_UNIT=1
script: node ./scripts/ci.js
email:
recipients:
- spencer.alger@elasticsearch.com

grunt/config/esvm.js (Normal file, 67 lines)
View File

@@ -0,0 +1,67 @@
var utils = require('../utils');
var _ = require('lodash-node');
var defaultOpts = exports.options = {
nodes: 1,
quiet: true,
config: {
'node.name': 'elasticsearch_js_test_runner',
'cluster.name': 'elasticsearch_js_test_runners',
'http.port': 9400,
'network.host': 'localhost',
'discovery.zen.ping_timeout': 1,
'discovery.zen.ping.multicast.enabled': false
}
};
function setBranchConfig(branch, target) {
switch (branch) {
case '0.90':
case '1.0':
case '1.1':
// no special treatment
break;
default:
target.options.config = _.merge({
'node.bench': true,
'script.disable_dynamic': false
}, defaultOpts.config);
break;
}
}
// targets for each branch
utils.branches.forEach(function (branch) {
exports[branch] = {
options: {
branch: branch
}
};
setBranchConfig(branch, exports[branch]);
});
// ci target, based on env variables
(function () {
var release = process.env.ES_RELEASE;
var branch = process.env.ES_BRANCH;
if (release) {
exports.ci_env = { options: { version: release } };
var versions = process.env.ES_RELEASE.split('.');
var major = versions.shift();
var minor = versions.shift();
branch = major + '.' + minor;
}
if (!release && branch) {
exports.ci_env = { options: { branch: branch } };
}
if (!branch) {
return;
}
setBranchConfig(branch, exports.ci_env);
}());
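
To make the generated target shape concrete, here is a small self-contained sketch (not part of the commit) of what the loop above produces for a branch outside the legacy 0.90/1.0/1.1 cases; the branch name '1.4' and the console.log are illustrative only, and lodash-node is assumed to be installed as in the project's dependencies.

// Illustrative sketch: mirrors the merge performed in setBranchConfig() above.
var _ = require('lodash-node');

var defaultConfig = {
  'node.name': 'elasticsearch_js_test_runner',
  'cluster.name': 'elasticsearch_js_test_runners',
  'http.port': 9400,
  'network.host': 'localhost',
  'discovery.zen.ping_timeout': 1,
  'discovery.zen.ping.multicast.enabled': false
};

// roughly equivalent to exports['1.4'] after setBranchConfig('1.4', exports['1.4']) runs
var target = { options: { branch: '1.4' } };
target.options.config = _.merge({
  'node.bench': true,
  'script.disable_dynamic': false
}, defaultConfig);

console.log(JSON.stringify(target, null, 2));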

View File

@@ -1,13 +1,3 @@
var esOpts = [
'-D es.http.port=9400',
'-D es.network.host=localhost',
'-D es.cluster.name=elasticsearch_js_test_runners',
'-D es.node.name=elasticsearch_js_test_runner',
'-D es.discovery.zen.ping.multicast.enabled=false',
'-D es.discovery.zen.ping_timeout=1',
'-D es.logger.level=ERROR',
];
var utils = require('../utils');
var config = {
@@ -63,37 +53,9 @@ var config = {
};
utils.branches.forEach(function (branch) {
config['generate_' + branch] = {
exec: 'node ./scripts/generate/index.js --branch=' + branch
};
config['install_es_' + branch] = {
exec: './scripts/es.sh install ' + branch,
};
var args = esOpts.slice(0);
switch (branch) {
case '0.90':
args.push('-f');
break;
case '1.0':
case '1.1':
// no special treatment
break;
default:
args.push('-Des.node.bench=true', '-Des.script.disable_dynamic=false');
break;
}
config['es_' + branch] = {
exec: './.snapshots/' + branch + '_nightly/bin/elasticsearch ' + args.join(' '),
options: {
wait: false,
quiet: true
}
};
});
module.exports = config;

View File

@@ -21,10 +21,9 @@ module.exports = function (grunt) {
branches.forEach(function (branch) {
tasks.push(
'run:install_es_' + branch,
'run:es_' + branch,
'esvm:' + branch,
'mochacov:integration_' + branch,
'stop:es_' + branch
'esvm_shutdown:' + branch
);
});
@@ -72,6 +71,7 @@ module.exports = function (grunt) {
writeFile(browserBuildsPath, browserBuilds),
writeFile(packagePath, JSON.stringify(pkg, null, ' '))
]);
}).nodeify(this.async());
})
.nodeify(this.async());
});
};
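
Read together with the esvm config above, the per-branch integration alias in the first hunk now expands to a list like the following sketch; the branch value '1.4' is an example and the comments are editorial, not from the commit.

// Sketch of the task list the forEach above builds for one branch.
// The old run:install_es_* / run:es_* / stop:es_* targets are gone; grunt-esvm manages the node.
var branch = '1.4';
var tasks = [
  'esvm:' + branch,                 // download and start an ES node via grunt-esvm
  'mochacov:integration_' + branch, // run the generated integration suite against it
  'esvm_shutdown:' + branch         // stop the node that esvm started
];
console.log(tasks);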

View File

@@ -38,13 +38,14 @@
"async": "~0.8.0",
"blanket": "~1.1.5",
"browserify": "~2.35.1",
"event-stream": "~3.1.0",
"chalk": "~0.5.1",
"expect.js": "~0.2.0",
"express": "~3.4.7",
"find-root": "~0.1.1",
"glob": "~3.2.7",
"grunt": "~0.4.1",
"grunt-browserify": "~1.2.11",
"grunt-cli": "~0.1.13",
"grunt-contrib-clean": "~0.5.0",
"grunt-contrib-compress": "~0.5.3",
"grunt-contrib-concat": "~0.3.0",
@@ -52,6 +53,7 @@
"grunt-contrib-jshint": "spenceralger/grunt-contrib-jshint",
"grunt-contrib-uglify": "~0.2.7",
"grunt-contrib-watch": "~0.5.3",
"grunt-esvm": "~0.2.3",
"grunt-mocha-cov": "~0.2.0",
"grunt-open": "~0.2.2",
"grunt-prompt": "~0.1.2",
@@ -70,7 +72,10 @@
"open": "0.0.4",
"optimist": "~0.6.0",
"semver": "~4.1.0",
"sinon": "~1.7.3",
"sinon": "~1.12.2",
"split": "~0.3.2",
"through2": "~0.6.3",
"through2-map": "~1.4.0",
"xmlbuilder": "~0.4.3"
},
"license": "Apache 2.0",
@@ -78,6 +83,7 @@
"bluebird": "~2.2.2",
"chalk": "~0.5.1",
"forever-agent": "~0.5.2",
"grunt-esvm": "~0.2.1",
"lodash-node": "~2.4"
},
"repository": {
@@ -91,4 +97,4 @@
"engines": {
"node": ">=0.8 <0.11"
}
}
}

View File

@@ -1,15 +1,37 @@
module.exports = _spawn;
var estream = require('event-stream');
var map = require('through2-map');
var split = require('split');
var chalk = require('chalk');
var spawn = require('child_process').spawn;
var path = require('path');
var root = path.resolve(__dirname, '../');
function indent(line) {
line = String(line).trim();
return line ? ' ' + line + '\n' : '';
}
function consume(stream) {
stream
.pipe(split())
.pipe(map(indent))
.pipe(process.stdout, { end: false });
}
function _spawn(cmd, args, opts, cb) {
opts = opts || {};
if (cmd === 'rm') {
opts.verbose = false;
}
var conf = {
stdio: 'pipe'
stdio: [
'ignore',
opts.verbose ? 'pipe' : 'ignore',
'pipe'
]
};
var subdir;
@@ -18,24 +40,16 @@ function _spawn(cmd, args, opts, cb) {
conf.cwd = opts.cwd;
subdir = path.relative(root, opts.cwd);
}
console.log(chalk.white.bold((subdir ? subdir + ' ' : '') + '$ ') + cmd + ' ' + args.join(' '));
var cp = spawn(cmd, args, opts);
var split = estream.split();
var cp = spawn(cmd, args, conf);
if (opts.verbose) {
cp.stdout.pipe(split);
} else {
cp.stdout.resume();
consume(cp.stdout);
}
cp.stderr.pipe(split);
split
.pipe(estream.mapSync(function indent(line) {
return line ? ' ' + line + '\n' : '';
}))
.pipe(process.stdout);
consume(cp.stderr);
if (typeof cb === 'function') {
cp.on('exit', cb);
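
A hedged usage sketch of the reworked spawn helper follows; the require path, command, and callback are hypothetical and only illustrate the (cmd, args, opts, cb) signature shown above.

// Hypothetical caller of the _spawn helper above (the module path is an assumption).
var spawn = require('../utils/spawn');

spawn('npm', ['install'], { cwd: __dirname, verbose: true }, function (code) {
  // because verbose is true, stdout was piped through split()/indent and echoed;
  // stderr is always consumed and echoed the same way.
  console.log('npm exited with code', code);
});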

View File

@@ -1,171 +0,0 @@
#!/usr/bin/env bash
#####
# Start a group of log output
#####
function group {
style='\x1b[1m\x1b[37m\x1b[4m'
reset='\x1b[24m\x1b[39m\x1b[22m'
echo -en "\n\n${style}${1}${reset}\n"
}
#####
# Do, log, and check a call
#####
function call {
local DO="$*"
echo "\$ ${DO}"
echo "$DO" | bash
local RESULT=$?
if [ "$RESULT" -gt "0" ]; then
echo "non-zero exit code: $RESULT"
exit $RESULT
fi
}
function ensure_grunt {
if [[ ! -x "$(which grunt)" ]]; then
group "installing grunt"
call npm install --silent -g grunt-cli
fi
}
#####
# call grunt, but make sure it's installed first
#####
function _grunt {
ensure_grunt
call grunt "$*"
}
#####
# Download a version of ES and get it running
# @arg ES_BRANCH - The branch to run off of
# @arg ES_RELEASE - The specific release to run, overrides ES_BRANCH
#####
function manage_es {
local DO=$1
local ES_BRANCH=$2
local ES_RELEASE=$3
local ROOT="$PWD"
local SNAPSHOTS="$ROOT/.snapshots"
local PIDS="$ROOT/.snapshots/pids"
group "${DO}ing es"
if [ ! -d "$PIDS" ]; then
call mkdir -p "$PIDS"
fi
if [ -n "$ES_RELEASE" ]; then
local ES_VERSION="v${ES_RELEASE}"
local ES_URL="https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ES_RELEASE}.zip"
local ES_DIR="${SNAPSHOTS}/${ES_VERSION}"
else
local BUCKET='s3-us-west-2.amazonaws.com/build.elasticsearch.org'
# TODO: TRASH THIS
if [[ $ES_BRANCH == "1.x" ]]; then local JDK='JDK7'
elif [[ $ES_BRANCH == "1.2" ]]; then local JDK='JDK7'
elif [[ $ES_BRANCH == "1.3" ]]; then local JDK='JDK7'
elif [[ $ES_BRANCH == "1.4" || $ES_BRANCH == "master" ]]; then
local JDK='JDK7'
local BUCKET='s3-eu-west-1.amazonaws.com/build-eu.elasticsearch.org'
else local JDK='JDK6'
fi
local ES_VERSION="${ES_BRANCH}_nightly"
local ES_URL="http://$BUCKET/origin/$ES_BRANCH/nightly/$JDK/elasticsearch-latest-SNAPSHOT.zip"
local DATE="$(date +%Y_%m_%d)"
local ES_DIR="${SNAPSHOTS}/${ES_VERSION}_${DATE}"
fi
local ES_BIN="$ES_DIR/bin/elasticsearch"
local PIDFILE="$ROOT/.snapshots/pids/$ES_VERSION"
case "$DO" in
reinstall)
if [ -x "$ES_BIN" ]; then
echo "removing $ES_VERSION"
rm -rf "${SNAPSHOTS}/${ES_VERSION}*"
fi
manage_es install "$ES_BRANCH" "$ES_RELEASE"
;;
install)
if [ ! -x "$ES_BIN" ]; then
echo "Downloading Elasticsearch $ES_VERSION"
rm -rf "${SNAPSHOTS}/${ES_VERSION}*"
call curl --silent -O "$ES_URL"
unzip -q elasticsearch-*.zip
rm elasticsearch-*.zip
mv elasticsearch-*/ "$ES_DIR"
if [ -z "$ES_RELEASE" ]; then
ln -sf "$ES_DIR" "${SNAPSHOTS}/${ES_VERSION}"
fi
else
echo "$ES_VERSION installed"
fi
;;
start)
# ensure that only one version is running at a time so that we can precisely kill them
if [ -f "$PIDFILE" ]; then
local PID="$(cat "$PIDFILE")"
kill -0 "$PID"
local RUNNING=$?
if [ $RUNNING -eq 0 ]; then
echo "Already running $ES_VERSION"
return 1
else
echo "PID file was left behind by ES"
rm "$PIDFILE"
fi
fi
manage_es install "$ES_BRANCH" "$ES_RELEASE"
if [ ! -x "$ES_BIN" ]; then
echo "Unable to find elasticsearch executable"
return 1
fi
local ES_OPTS="-p $PIDFILE -D es.http.port=9400 -D es.network.host=localhost -D es.cluster.name=elasticsearch_js_test_runners -D es.node.name=elasticsearch_js_test_runner -D es.discovery.zen.ping.multicast.enabled=false -D es.discovery.zen.ping_timeout=1 -D es.logger.level=ERROR"
if [ -n "$ES_NODE_NAME" ]; then
ES_OPTS="$ES_OPTS -Des.node.name=$ES_NODE_NAME"
fi
if [[ $ES_BRANCH != "0.90" ]]; then
# explicitly run as daemon
ES_OPTS="-d $ES_OPTS"
fi
if [[ $ES_BRANCH != "1.0" && $ES_BRANCH != "1.1" ]]; then
# enable scripting and benchmarks
ES_OPTS="$ES_OPTS -D es.node.bench=true -D es.script.disable_dynamic=false"
fi
call "$ES_BIN" "$ES_OPTS"
;;
stop)
if [ -e "$PIDFILE" ]; then
local PID="$(cat "$PIDFILE")"
kill -0 "$PID"
local RUNNING=$?
if [ $RUNNING -eq 0 ]; then
kill "$PID"
echo "Elasticsearch $ES_VERSION stopped"
return 0
fi
rm "$PIDFILE"
fi
echo "Elasticsearch $ES_VERSION is not running."
return 1
;;
esac
}

scripts/ci.js (Normal file, 326 lines)
View File

@@ -0,0 +1,326 @@
/**
* Run the tests, and set up ES if needed
*
* ENV VARS:
*  ES_V - a version identifier used by Jenkins; don't use this directly
*  ES_BRANCH - the ES branch we should use to generate the tests and download ES
*  ES_RELEASE - a specific ES release to download and use for testing
*  NODE_UNIT - 0/1, run the unit tests in node (defaults to 1)
*  NODE_INTEGRATION - 0/1, run the integration tests in node (defaults to 1)
*  BROWSER_UNIT - the browser to test in on Sauce Labs; one of 'ie', 'firefox', 'chrome'
*  COVERAGE - 0/1, check for coverage and ship it to coveralls
*******/
var Promise = require('bluebird');
var _ = require('lodash-node');
var through2 = require('through2');
var map = require('through2-map');
var split = require('split');
var join = require('path').join;
var fs = require('fs');
var child_process = require('child_process');
var chalk = require('chalk');
var format = require('util').format;
var ROOT = join(__dirname, '..');
var GRUNT = join(ROOT, './node_modules/.bin/grunt');
var MOCHA = join(ROOT, './node_modules/.bin/mocha');
var MOCHA_REPORTER = 'test/utils/jenkins-reporter.js';
var ENV = _.clone(process.env);
var JENKINS = !!ENV.JENKINS_HOME;
/******
* SETUP
******/
var taskChain = Promise.resolve();
var output; // main output stream
var taskOut; // task output stream
/******
* GET VERSION
******/
task(
'read version from environment',
true,
function () {
function read() {
if (ENV.ES_V) {
var match;
if (match = ENV.ES_V.match(/^(.*)_nightly$/)) {
return [match[1], null];
}
if (match = ENV.ES_V.match(/^(1\.\d+|0\.90)\..*$/)) {
return [match[1], ENV.ES_V];
}
throw new Error('unable to parse ES_V ' + ENV.ES_V);
}
if (ENV.ES_BRANCH) {
return [ENV.ES_BRANCH, ENV.ES_RELEASE || null];
}
}
var ver = read();
if (!ver) {
throw new Error('Unable to run the ci script without at least an ES_BRANCH environment var.');
}
if (ver[0]) {
taskOut.write('branch: ' + (ENV.ES_BRANCH = ver[0]) + '\n');
} else {
delete ENV.ES_BRANCH;
}
if (ver[1]) {
taskOut.write('release: ' + (ENV.ES_RELEASE = ver[1]) + '\n');
} else {
delete ENV.ES_RELEASE;
}
}
);
task(
'node unit tests',
ENV.NODE_UNIT !== '0',
function () {
if (!JENKINS) {
return grunt('jshint', 'mochacov:unit');
}
var report = join(ROOT, 'test/junit-node-unit.xml');
var tests = join(ROOT, 'test/unit/index.js');
return mocha(report, tests, '--reporter', join(ROOT, MOCHA_REPORTER));
}
);
task(
'node integration tests',
ENV.NODE_INTEGRATION !== '0',
function () {
var branch = ENV.ES_BRANCH;
return node('scripts/generate', '--no-api', '--branch', branch)
.then(function () {
if (JENKINS) return;
return grunt('esvm:ci_env', 'mochacov:integration_' + branch, 'esvm_shutdown:ci_env');
})
.then(function () {
if (!JENKINS) return;
var branchSuffix = '_' + branch.replace(/\./g, '_');
var tests = 'test/integration/yaml_suite/index' + branchSuffix + '.js';
var esPort = ENV.es_port || 9200;
var report = 'test/junit-node-integration.xml';
return mocha(report, tests, '--host', 'localhost', '--port', esPort, '--reporter', MOCHA_REPORTER);
});
}
);
task(
'browser unit tests',
ENV.BROWSER_UNIT === '1',
function () {
return new Promise(function (resolve, reject) {
// build the clients and start the server; once the server is ready, call trySaucelabs()
var serverTasks = ['browser_clients:build', 'run:browser_test_server:keepalive'];
spawn(GRUNT, serverTasks, function (cp) {
var stdout = cp.stdout;
var lines = split();
var findReady = through2(function (line, enc, cb) {
cb();
line = String(line);
if (line.indexOf('run:browser_test_server') === -1) return;
trySaucelabs()
.finally(function () {
cp.kill();
})
.then(resolve, reject);
stdout.unpipe(lines);
lines.end();
});
stdout.pipe(lines).pipe(findReady);
});
// attempt to run tests on saucelabs and retry if it fails
var saucelabsAttempts = 0;
function trySaucelabs() {
saucelabsAttempts++;
return new Promise(function (resolve, reject) {
spawn(GRUNT, ['saucelabs-mocha'], function (cp) {
var failedTests = 0;
cp.stdout
.pipe(split())
.pipe(map(function (line) {
line = String(line);
if (line.trim() === 'Passed: false') {
failedTests ++;
}
}));
cp.on('error', reject);
cp.on('exit', function (code) {
if (code > 0) {
if (failedTests > 0) {
return reject(new Error('Browser tests failed'));
}
if (saucelabsAttempts >= 3) {
return reject(new Error('Saucelabs is like really really down. Tried 3 times'));
}
taskOut.write(chalk.blue('trying saucelabs again...'));
return trySaucelabs().then(resolve, reject);
}
return resolve();
});
})
// swallow spawn() errors
.then(_.noop, _.noop);
});
}
});
}
);
task(
'code coverage',
ENV.COVERAGE === '1',
function () {
return grunt('mochacov:ship_coverage')
.catch(function () {
taskOut.write('FAILED TO SHIP COVERAGE! but that\'s okay\n');
});
}
);
/******
* FINISH
*/
taskChain
.finally(function () {
// output directly to stdout
output = process.stdout;
})
.then(function () {
logImportant(chalk.bold.green('✔︎ SUCCESS'));
})
.catch(function (e) {
logImportant(chalk.bold.red('✗ FAILURE\n\n' + e.stack));
// override process exit code once it is ready to close
process.once('exit', function () {
process.exit(1);
});
});
/******
* utils
******/
function log() {
var chunk = format.apply(null, arguments);
output.write(chunk + '\n');
}
function logImportant(text) {
log('\n------------');
log(text);
log('------------\n');
}
function indent(line) {
line = String(line).trim();
return line ? ' ' + line + '\n' : '';
}
function task(name, condition, block) {
if (!condition) return;
taskChain = taskChain.then(function () {
taskOut = through2();
output = through2();
taskOut
.pipe(split())
.pipe(map(indent))
.pipe(output);
output
.pipe(process.stdout, { end: false });
log(chalk.white.underline(name));
function flushTaskOut() {
return new Promise(function (resolve) {
// wait for the taskOut to finish writing before continuing
output.once('finish', function () {
process.stdout.write('\n');
resolve();
});
taskOut.end(); // will end output as well
taskOut = output = null;
});
}
return Promise.try(block).finally(flushTaskOut);
});
}
function spawn(file, args, block) {
return new Promise(function (resolve, reject) {
var cp = child_process.spawn(file, args, {
cwd: ROOT,
env: ENV,
stdio: [0, 'pipe', 'pipe']
});
cp.stdout.pipe(taskOut, { end: false });
cp.stderr.pipe(taskOut, { end: false });
var stdout = '';
cp.stdout
.pipe(through2(function (chunk, enc, cb) {
stdout += chunk;
cb();
}));
block && block(cp);
cp.on('exit', function (code) {
if (code > 0) {
reject(new Error('non-zero exit code: ' + code));
} else {
resolve(stdout);
}
});
cp.on('error', function (origErr) {
reject(new Error('Unable to execute "' + file + ' ' + args.join(' ') + '": ' + origErr.message));
});
});
}
function node(/*args... */) {
return spawn(process.execPath, _.toArray(arguments));
}
function grunt(/* args... */) {
return spawn(GRUNT, _.toArray(arguments));
}
function mocha(report/*, args... */) {
return spawn(MOCHA, _.rest(arguments, 1), function (cp) {
cp.stderr.unpipe();
cp.stderr.pipe(fs.createWriteStream(report));
});
}
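
To illustrate the version detection performed by the 'read version from environment' task, here is a standalone sketch using the same regular expressions as read() above; the sample inputs and console.log calls are illustrative only.

// Standalone sketch of the ES_V parsing used by the first task in the chain.
function parse(ES_V) {
  var match;
  if ((match = ES_V.match(/^(.*)_nightly$/))) {
    return { branch: match[1], release: null };
  }
  if ((match = ES_V.match(/^(1\.\d+|0\.90)\..*$/))) {
    return { branch: match[1], release: ES_V };
  }
  throw new Error('unable to parse ES_V ' + ES_V);
}

console.log(parse('1.x_nightly')); // { branch: '1.x', release: null }
console.log(parse('1.4.2'));       // { branch: '1.4', release: '1.4.2' }
console.log(parse('0.90.13'));     // { branch: '0.90', release: '0.90.13' }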

View File

@@ -1,83 +0,0 @@
#!/usr/bin/env bash
###########
# Run the tests, and setup es if needed
#
# ENV VARS:
# ES_BRANCH - the ES branch we should use to generate the tests and download es
# ES_RELEASE - a specific ES release to download in use for testing
# NODE_UNIT=1 - 0/1 run the unit tests in node
# NODE_INTEGRATION=1 - 0/1 run the integration tests in node
# BROWSER_UNIT - the browser to test in using, sauce labs. One of 'ie', 'firefox', 'chrome'
# COVERAGE - 0/1 check for coverage and ship it to coveralls
#
###########
export ES_NODE_NAME="elasticsearch_js_test_runner"
HERE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
MOCHA="./node_modules/.bin/mocha"
MOCHA_REPORTER="../../../test/utils/jenkins-reporter.js"
source $HERE/_utils.sh
# normalize ES_BRANCH into TESTING_BRANCH
if [[ -n "$ES_BRANCH" ]]; then
TESTING_BRANCH=$ES_BRANCH
else
TESTING_BRANCH="master"
fi
if [[ "$NODE_UNIT" != "0" ]]; then
group "running unit tests"
if [[ -n "$JENKINS" ]]; then
$MOCHA test/unit/index.js --reporter $MOCHA_REPORTER 2> test/junit-node-unit.xml
if [ "$?" -gt "0" ]; then
echo "non-zero exit code: $RESULT"
cat test/junit-node-unit.xml
fi
else
_grunt jshint mochacov:unit
fi
fi
if [[ "$NODE_INTEGRATION" != "0" ]]; then
group "generating tests"
call node scripts/generate --no-api --branch $TESTING_BRANCH
group "running integration tests"
if [[ -n "$JENKINS" ]]; then
# convert TESTING_BRANCH into BRANCH_SUFFIX
BRANCH_SUFFIX="_${TESTING_BRANCH//./_}"
# find value of ES_PORT
if [[ -n "$es_port" ]]; then
# jenkins
ES_PORT=$es_port
else
ES_PORT=9200
fi
FILES=test/integration/yaml_suite/index${BRANCH_SUFFIX}.js
$MOCHA $FILES --host localhost --port $ES_PORT --reporter $MOCHA_REPORTER 2> test/junit-node-integration.xml
if [ "$?" -gt "0" ]; then
echo "non-zero exit code: $RESULT"
cat test/junit-node-unit.xml
fi
else
manage_es start $TESTING_BRANCH $ES_RELEASE
_grunt mochacov:integration_$TESTING_BRANCH
manage_es stop $TESTING_BRANCH $ES_RELEASE
fi
fi
if [[ "$BROWSER_UNIT" == "1" ]]; then
group "running browser tests"
_grunt browser_clients:build run:browser_test_server saucelabs-mocha
fi
if [[ "$COVERAGE" == "1" ]]; then
group "shipping coverage"
# don't fail even if this does
_grunt --force mochacov:ship_coverage
fi

View File

@@ -1,26 +0,0 @@
#!/usr/bin/env bash
if [ -z "$2" ]; then
echo "Usage:
From the root of the elasticsearch-js project call:
Start nightly:
./scripts/es.sh start master
Stop 0.90 branch:
./scripts/es.sh stop 0.90
Start release version 0.90.7:
./scripts/es.sh start 0.90 0.90.7
"
exit 1
fi
source scripts/_utils.sh
if [[ -z "$ES_NODE_NAME" ]]; then
export ES_NODE_NAME="elasticsearch_js_test_runner"
fi
manage_es $*
exit $?

View File

@@ -1,32 +0,0 @@
#!/usr/bin/env bash
export VERBOSE="true"
export JENKINS="true"
HERE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
set +x
if [[ -z "$ES_V" ]]; then
echo "you must set the ES_V environment variable to use this script"
exit 1
fi
re_nightly='^(.*)_nightly$';
re_090='^0\.90\..*$';
re_1x='^1\.([0-9]+)\..*$';
if [[ "$ES_V" =~ $re_nightly ]]; then
export ES_BRANCH=${BASH_REMATCH[1]}
elif [[ "$ES_V" =~ $re_090 ]]; then
export ES_BRANCH='0.90'
export ES_RELEASE=$ES_V
elif [[ "$ES_V" =~ $re_1x ]]; then
export ES_BRANCH="1.${BASH_REMATCH[1]}"
export ES_RELEASE=$ES_V
else
echo "unable to parse ES_V $ES_V"
exit 1
fi
echo "ES_BRANCH = $ES_BRANCH , ES_RELEASE = $ES_RELEASE"
source $HERE/ci.sh

View File

@@ -18,7 +18,6 @@ describe('Http Connector', function () {
var MockRequest = require('../../mocks/request');
var MockIncommingMessage = require('../../mocks/incomming_message');
var zlib = require('zlib');
var estr = require('event-stream');
nock.disableNetConnect();

View File

@@ -6,7 +6,7 @@ var expect = require('expect.js');
var sinon = require('sinon');
var nock = require('../../mocks/server.js');
var estr = require('event-stream');
var through2 = require('through2');
var _ = require('lodash-node');
var nodeList = require('../../fixtures/short_node_list.json');
var stub = require('../../utils/auto_release_stub').make();
@@ -311,9 +311,10 @@ describe('Transport + Mock server', function () {
var serverMock = nock('http://esbox.1.com')
.get('/')
.reply(200, function () {
var str = estr.readable(function (count, cb) {
var str = through2(function (chunk, enc, cb) {
cb(new Error('force error'));
});
str.setEncoding = function () {}; // force nock's isStream detection
return str;
});