Summary of Changes:

- moved ES install/start/stop logic into a separate script
- `grunt test` now runs the integration tests once for each version of ES we support
- grunt can now install and run elasticsearch (using grunt-run; a pure-js solution is coming later)
- included a separate es.sh script specifically for starting or stopping elasticsearch
- url aliases, api, yaml_suite/index.js, and yaml_tests.json are all now duplicated for 0_90 support
- the client now accepts an apiVersion argument (undocumented) which defaults to 'master' but can be '0.90' (see the sketch after this list)
- the yaml test runner will now check the name of the ES instance it is connecting to, preventing accidental wiping of ES
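
For illustration, a minimal sketch of the new client option (the option name and default come from this summary; the exact plumbing may differ):

var elasticsearch = require('elasticsearch');

// apiVersion selects which generated action set the client binds:
// 'master' (the default) maps to src/lib/api.js, '0.90' to src/lib/api_0_90.js
var client = new elasticsearch.Client({
  host: 'localhost:9200',
  apiVersion: '0.90'
});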
Spencer Alger
2014-01-14 23:10:12 -07:00
parent 37ce4e440c
commit 18e134d0a6
30 changed files with 23318 additions and 560 deletions

scripts/_utils.sh (new file, 126 lines)

@@ -0,0 +1,126 @@
#####
# Start or stop a group for travis
#####
function group {
  # emits travis_fold markers ("start:..."/"end:...") that Travis CI
  # uses to collapse this section of the build log
  if [ -n "$TRAVIS" ]; then
    echo -e "travis_fold:$1"
  fi
}

#####
# Do, log, and check a call
#####
function call {
  local DO="$*"
  echo "\$ ${DO}"
  echo $DO | bash
  local RESULT=$?
  if [ "$RESULT" -gt "0" ]; then
    echo "non-zero exit code: $RESULT"
    exit $RESULT
  fi
}
#####
# Install, start, or stop a copy of ES
# @arg DO - install, start, or stop
# @arg ES_BRANCH - The branch to run off of
# @arg ES_RELEASE - The specific release to run, overrides ES_BRANCH
#####
function manage_es {
  local DO=$1
  local ES_BRANCH=$2
  local ES_RELEASE=$3

  local ROOT="$PWD"
  local ES_SUBMODULE="$ROOT/src/elasticsearch"
  local SNAPSHOTS="$ROOT/.snapshots"
  local PIDS="$ROOT/.snapshots/pids"

  group "start:$DO es"

  if [ ! -d "$PIDS" ]; then
    call mkdir -p $PIDS
  fi

  if [ -n "$ES_RELEASE" ]; then
    local ES_VERSION="v${ES_RELEASE}"
    local ES_URL="https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ES_RELEASE}.zip"
    local ES_DIR="${SNAPSHOTS}/${ES_VERSION}"
  else
    local ES_VERSION="${ES_BRANCH}_nightly"
    local ES_URL="http://s3-us-west-2.amazonaws.com/build.elasticsearch.org/origin/$ES_BRANCH/nightly/JDK6/elasticsearch-latest-SNAPSHOT.zip"
    local DATE=`date +%Y_%m_%d`
    local ES_DIR="${SNAPSHOTS}/${ES_VERSION}_${DATE}"
  fi

  local ES_BIN="$ES_DIR/bin/elasticsearch"
  local PIDFILE="$ROOT/.snapshots/pids/$ES_VERSION"

  case "$DO" in
    install)
      if [ ! -d "$ES_DIR" ]; then
        echo "Downloading Elasticsearch $ES_VERSION"
        call rm -rf ${SNAPSHOTS}/${ES_VERSION}*
        call curl -#O $ES_URL
        unzip -q elasticsearch-*.zip
        rm elasticsearch-*.zip
        mv elasticsearch-*/ $ES_DIR
        if [ -z "$ES_RELEASE" ]; then
          ln -sf $ES_DIR "${SNAPSHOTS}/${ES_VERSION}"
        fi
      fi
      ;;
    start)
      # ensure that only one version is running at a time so that we can precisely kill them
      if [ -f $PIDFILE ]; then
        local PID=`cat $PIDFILE`
        kill -0 $PID   # signal 0 only tests whether the process is alive
        local RUNNING=$?
        if [ $RUNNING -eq 0 ]; then
          echo "Already running $ES_VERSION"
          exit 1
        else
          echo "PID file was left behind by ES"
          call rm $PIDFILE
        fi
      fi

      ./scripts/es.sh install $ES_BRANCH $ES_RELEASE

      if [ ! -x "$ES_BIN" ]; then
        echo "Unable to find elasticsearch executable"
        exit 1
      fi

      local ES_OPTS="-p $PIDFILE -Des.network.host=localhost -Des.discovery.zen.ping.multicast.enabled=false -Des.discovery.zen.ping_timeout=1"

      if [ "$ES_BRANCH" = "0.90" ]; then
        echo "Starting Elasticsearch $ES_VERSION"
        call $ES_BIN $ES_OPTS
      else
        echo "Starting Elasticsearch $ES_VERSION as a daemon"
        call $ES_BIN -d $ES_OPTS
      fi
      ;;
    stop)
      if [ -e $PIDFILE ]; then
        local PID=`cat $PIDFILE`
        kill -0 $PID   # signal 0 only tests whether the process is alive
        local RUNNING=$?
        if [ $RUNNING -eq 0 ]; then
          kill $PID
          echo "Elasticsearch $ES_VERSION stopped"
          exit 0
        fi
        rm $PIDFILE
      fi
      echo "Elasticsearch $ES_VERSION is not running."
      exit 1
      ;;
  esac

  group "end:$DO es"
}
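
As noted in the summary, `grunt test` drives this script through grunt-run; a hypothetical task config (task names and wiring assumed, not part of this diff) might look like:

// hypothetical grunt-run tasks; each one shells out to scripts/es.sh,
// which sources the manage_es function defined above
run: {
  install_es_master: { cmd: './scripts/es.sh', args: ['install', 'master'] },
  es_master: { cmd: './scripts/es.sh', args: ['start', 'master'], options: { wait: false } },
  es_0_90: { cmd: './scripts/es.sh', args: ['start', '0.90'], options: { wait: false } }
}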


@@ -15,28 +15,7 @@
#
###########
-#####
-# Start or stop a group for travis
-#####
-function group {
-  if [ -n "$TRAVIS" ]; then
-    echo -e "travis_fold:$1"
-  fi
-}
-#####
-# Do, log, and check a call
-#####
-function call {
-  local DO="$*"
-  echo "\$ ${DO}"
-  echo $DO | bash
-  local RESULT=$?
-  if [ "$RESULT" -gt "0" ]; then
-    echo "non-zero exit code: $RESULT"
-    exit $RESULT
-  fi
-}
+source _utils.sh
#####
# call grunt, but make sure it's installed first
@@ -54,72 +33,6 @@ function grunt_ {
  call grunt $DO
}
-#####
-# Download a version of ES and get it running
-# @arg ES_BRANCH - The branch to run off of
-# @arg ES_RELEASE - The specific release to run, overrides ES_BRANCH
-#####
-function get_es {
-  group "start:setup_es"
-  local ES_BRANCH=$1
-  local ES_RELEASE=$2
-
-  local ROOT="$PWD"
-  local ES_SUBMODULE="$ROOT/src/elasticsearch"
-  local SNAPSHOTS="$ROOT/.snapshots"
-
-  if [ ! -d "$SNAPSHOTS" ]; then
-    mkdir -p $SNAPSHOTS
-  fi
-
-  if [ -n "$ES_RELEASE" ]; then
-    local ES_VERSION="v${ES_RELEASE}"
-    local ES_URL="https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ES_RELEASE}.zip"
-    local ES_DIR="${SNAPSHOTS}/${ES_VERSION}"
-  else
-    local ES_VERSION="${ES_BRANCH}_nightly"
-    local ES_URL="http://s3-us-west-2.amazonaws.com/build.elasticsearch.org/origin/$ES_BRANCH/nightly/JDK6/elasticsearch-latest-SNAPSHOT.zip"
-    local DATE=`date +%Y_%m_%d`
-    local ES_DIR="${SNAPSHOTS}/${ES_VERSION}_${DATE}"
-    if [ ! -d $ES_DIR ]; then
-      call rm -rf ${SNAPSHOTS}/${ES_VERSION}*
-    fi
-  fi
-
-  local ES_BIN="$ES_DIR/bin/elasticsearch"
-
-  call cd $SNAPSHOTS
-  if [ ! -d "$ES_DIR" ]; then
-    echo "Downloading Elasticsearch $ES_VERSION"
-    call curl -#O $ES_URL
-    unzip -q elasticsearch-*.zip
-    rm elasticsearch-*.zip
-    mv elasticsearch-*/ $ES_DIR
-  fi
-  call cd $ROOT
-
-  if [ ! -x "$ES_BIN" ]; then
-    echo "Unable to find elasticsearch executable"
-    exit 1
-  fi
-
-  if [ "$ES_BRANCH" = "0.90" ]; then
-    echo "Starting Elasticsearch $ES_VERSION"
-    call $ES_BIN \
-      -Des.network.host=localhost \
-      -Des.discovery.zen.ping.multicast.enabled=false \
-      -Des.discovery.zen.ping_timeout=1
-  else
-    echo "Starting Elasticsearch $ES_VERSION as a daemon"
-    call $ES_BIN -d \
-      -Des.network.host=localhost \
-      -Des.discovery.zen.ping.multicast.enabled=false \
-      -Des.discovery.zen.ping_timeout=1
-  fi
-  group "end:setup_es"
-}
if [ -n "$ES_BRANCH" ]; then
  TESTING_BRANCH=$ES_BRANCH
@@ -135,15 +48,14 @@ fi
if [[ "$NODE_INTEGRATION" != "0" ]]; then
if [[ -n "$ES_BRANCH" ]] && [[ "$USER" != "jenkins" ]]; then
killall java 2>/dev/null
get_es $ES_BRANCH $ES_RELEASE
manage_es start $ES_BRANCH $ES_RELEASE
fi
call node scripts/generate --no-api --es_branch=\"$TESTING_BRANCH\"
grunt_ mochacov:integration
if [[ -n "$ES_BRANCH" ]] && [[ "$USER" != "jenkins" ]]; then
killall java 2>/dev/null
manage_es stop $ES_BRANCH $ES_RELEASE
fi
fi

scripts/es.sh (new executable file, 19 lines)

@@ -0,0 +1,19 @@
if [ -z "$2" ]; then
  echo "Usage:

  From the root of the elasticsearch-js project call:

  Start nightly:
    ./scripts/es.sh start master

  Stop 0.90 branch:
    ./scripts/es.sh stop 0.90

  Start release version 0.90.7:
    ./scripts/es.sh start 0.90 0.90.7
"
  exit 1
fi

source scripts/_utils.sh
manage_es $*
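
The summary flags a pure-js replacement for this script as future work; presumably it would spawn the same binary with the same flags from node, along these lines (hypothetical, not in this commit):

var cp = require('child_process');

// hypothetical: start a downloaded snapshot directly from node,
// reusing the flags manage_es passes in _utils.sh above
var es = cp.spawn('.snapshots/master_nightly/bin/elasticsearch', [
  '-Des.network.host=localhost',
  '-Des.discovery.zen.ping.multicast.enabled=false',
  '-Des.discovery.zen.ping_timeout=1'
], { stdio: 'inherit' });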


@@ -0,0 +1,95 @@
module.exports = {
  'cluster.nodeHotThreads': [
    '/_cluster/nodes/hotthreads',
    '/_cluster/nodes/hot_threads',
    '/_nodes/hot_threads',
    '/_cluster/nodes/{node_id}/hotthreads',
    '/_cluster/nodes/{node_id}/hot_threads',
    '/_nodes/{node_id}/hot_threads'
  ],
  'cluster.nodeInfo': [
    '/_cluster/nodes',
    '/_nodes/settings',
    '/_nodes/os',
    '/_nodes/process',
    '/_nodes/jvm',
    '/_nodes/thread_pool',
    '/_nodes/network',
    '/_nodes/transport',
    '/_nodes/http',
    '/_nodes/plugin',
    '/_cluster/nodes/{node_id}',
    '/_nodes/{node_id}/settings',
    '/_nodes/{node_id}/os',
    '/_nodes/{node_id}/process',
    '/_nodes/{node_id}/jvm',
    '/_nodes/{node_id}/thread_pool',
    '/_nodes/{node_id}/network',
    '/_nodes/{node_id}/transport',
    '/_nodes/{node_id}/http',
    '/_nodes/{node_id}/plugin'
  ],
  'cluster.nodeShutdown': [
    '/_cluster/nodes/_shutdown'
  ],
  'cluster.nodeStats': [
    '/_cluster/nodes/stats',
    '/_nodes/stats/{metric_family}',
    '/_nodes/stats/indices/{metric}/{fields}',
    '/_cluster/nodes/{node_id}/stats',
    '/_nodes/{node_id}/stats/{metric_family}',
    '/_nodes/{node_id}/stats/indices/{metric}/{fields}'
  ],
  'get': [
    '/{index}/{type}/{id}/_source'
  ],
  'indices.deleteMapping': [
    '/{index}/{type}',
    '/{index}/_mapping/{type}',
    '/{index}/{type}/_mappings',
    '/{index}/_mappings/{type}'
  ],
  'indices.putWarmer': [
    // '/_warmer/{name}',
    // '/{index}/_warmer/{name}',
    // '/{index}/{type}/_warmer/{name}',
    '/_warmers/{name}',
    '/{index}/_warmers/{name}',
    '/{index}/{type}/_warmers/{name}'
  ],
  'indices.deleteWarmer': [
    // '/{index}/_warmer/{name}',
    '/{index}/_warmer',
    '/{index}/_warmers',
    '/{index}/_warmers/{name}'
  ],
  'indices.deleteAlias': [
    // '/{index}/_alias/{name}',
    '/{index}/_aliases/{name}'
  ],
  'indices.putAlias': [
    // '/{index}/_alias/{name}',
    // '/_alias/{name}',
    '/{index}/_aliases/{name}',
    '/_aliases/{name}'
  ],
  'indices.putMapping': [
    // '/{index}/_mapping/{type}',
    // '/_mapping/{type}',
    '/{index}/{type}/_mapping',
    '/{index}/{type}/_mappings',
    '/{index}/_mappings/{type}',
    '/_mappings/{type}'
  ],
  'indices.stats': [
    '_stats/{metric_family}',
    '/_stats/indexing',
    '/_stats/indexing/{indexing_types}',
    '/_stats/search/{search_groups}',
    '/_stats/fielddata/{fields}',
    '/{index}/_stats/{metric_family}',
    '/{index}/_stats/indexing',
    '/{index}/_stats/search/{search_groups}',
    '/{index}/_stats/fielddata/{fields}'
  ]
};
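
These maps are keyed by client action name; js_api.js (later in this diff) picks the branch-appropriate file while generating the API. Its selection logic, condensed:

// _ is the project's utils module (provides snakeCase);
// 'master' resolves to ./aliases.js, '0.90' to ./aliases_0_90.js
var branchSuffix = branch === 'master' ? '' : '_' + _.snakeCase(branch);
var aliases = require('./aliases' + branchSuffix);
var urls = aliases['cluster.nodeStats']; // e.g. the nodeStats fallbacks above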


@@ -0,0 +1,63 @@
module.exports = {
  'cluster.nodeHotThreads': [
    '/_cluster/nodes/hotthreads',
    '/_cluster/nodes/hot_threads',
    '/_nodes/hot_threads',
    '/_cluster/nodes/{node_id}/hotthreads',
    '/_cluster/nodes/{node_id}/hot_threads',
    '/_nodes/{node_id}/hot_threads'
  ],
  'cluster.nodeInfo': [
    '/_cluster/nodes',
    '/_nodes/settings',
    '/_nodes/os',
    '/_nodes/process',
    '/_nodes/jvm',
    '/_nodes/thread_pool',
    '/_nodes/network',
    '/_nodes/transport',
    '/_nodes/http',
    '/_nodes/plugin',
    '/_cluster/nodes/{node_id}',
    '/_nodes/{node_id}/settings',
    '/_nodes/{node_id}/os',
    '/_nodes/{node_id}/process',
    '/_nodes/{node_id}/jvm',
    '/_nodes/{node_id}/thread_pool',
    '/_nodes/{node_id}/network',
    '/_nodes/{node_id}/transport',
    '/_nodes/{node_id}/http',
    '/_nodes/{node_id}/plugin'
  ],
  'cluster.nodeShutdown': [
    '/_cluster/nodes/_shutdown'
  ],
  'cluster.nodeStats': [
    '/_cluster/nodes/stats',
    '/_nodes/stats/{metric_family}',
    '/_nodes/stats/indices/{metric}/{fields}',
    '/_cluster/nodes/{node_id}/stats',
    '/_nodes/{node_id}/stats/{metric_family}',
    '/_nodes/{node_id}/stats/indices/{metric}/{fields}'
  ],
  'get': [
    '/{index}/{type}/{id}/_source'
  ],
  'indices.deleteMapping': [
    '/{index}/{type}/_mapping'
  ],
  'indices.stats': [
    '_stats/{metric_family}',
    '/_stats/indexing',
    '/_stats/indexing/{indexing_types}',
    '/_stats/search/{search_groups}',
    '/_stats/fielddata/{fields}',
    '/{index}/_stats/{metric_family}',
    '/{index}/_stats/indexing',
    '/{index}/_stats/search/{search_groups}',
    '/{index}/_stats/fielddata/{fields}'
  ],
  'search': [
    '/_search'
  ]
};


@@ -1,5 +1,7 @@
var cp = require('child_process');
var async = require('async');
+var estream = require('event-stream');
+var chalk = require('chalk');
var argv = require('optimist')
  .options({
    force: {
@@ -39,56 +41,72 @@ if (!argv.force && process.env.FORCE || process.env.FORCE_GEN) {
  argv.force = argv.f = process.env.FORCE || process.env.FORCE_GEN;
}

var branch = argv.es_branch;
-// branch can be prefixed with = or suffixed with _nightly
-if (branch.indexOf) {
-  ['='].forEach(function removePrefix(pref) {
-    if (branch.indexOf(pref) === 0) {
-      branch = branch.substring(pref.length);
-    }
-  });
+function spawn(cmd, args) {
+  console.log(chalk.white.bold('$ ' + cmd + ' ' + args.join(' ')));
-  ['_nightly'].forEach(function removeSuffix(suf) {
-    if (branch.indexOf(suf) === branch.length - suf.length) {
-      branch = branch.substr(0, branch.length - suf.length);
-    }
-  });
-}
+  var proc = cp.spawn(cmd, args, { stdio: 'pipe' });
+  var out = estream.split();
+  if (argv.verbose) {
+    proc.stdout.pipe(out);
+  } else {
+    proc.stdout.resume();
+  }
+  proc.stderr.pipe(out);
+  out
+    .pipe(estream.mapSync(function indent(line) {
+      return ' ' + line + '\n';
+    }))
+    .pipe(process.stdout);
+  return proc;
+}
-var stdio = [
-  'ignore',
-  argv.verbose ? process.stdout : 'ignore',
-  process.stderr
-];
+function generateBranch(branch, i, done) {
+  async.series([
+    function (done) {
+      if (i === 0) {
+        spawn('git', ['submodule', 'update', '--init'])
+          .on('exit', function (status) {
+            done(status ? new Error('Unable to init submodules.') : void 0);
+          });
+        return;
+      }
+      done();
+    },
+    function (done) {
+      spawn('git', ['submodule', 'foreach', [
+        'git fetch origin master', 'git reset --hard', 'git clean -fdx', 'git checkout origin/' + branch
+      ].join(' && ')])
+        .on('exit', function (status) {
+          done(status ? new Error('Unable to checkout ' + branch) : void 0);
+        });
+    },
+    function (done) {
+      var tasks = [];
+      if (argv.api) {
+        tasks.push(
+          async.apply(require('./js_api'), branch)
+        );
+      }
+      if (argv.tests) {
+        tasks.push(
+          async.apply(require('./yaml_tests'), branch)
+        );
+      }
+      async.parallel(tasks, done);
+    }
+  ], done);
+}

async.series([
-  function (done) {
-    cp.spawn('git', ['submodule', 'update', '--init'], {
-      stdio: stdio
-    }).on('exit', function (status) {
-      done(status ? new Error('Unable to init submodules.') : void 0);
-    });
-  },
-  function (done) {
-    // checkout branch and clean it
-    cp.spawn('git', ['submodule', 'foreach', 'git fetch origin master && git checkout origin/' + branch + ' && git clean -f'], {
-      stdio: stdio
-    }).on('exit', function (status) {
-      done(status ? new Error('Unable to checkout ' + branch) : void 0);
-    });
-  },
-  function (done) {
-    var tasks = [];
-    if (argv.api) {
-      tasks.push(require('./js_api'));
-    }
-    if (argv.tests) {
-      tasks.push(require('./yaml_tests'));
-    }
-    async.parallel(tasks, done);
-  }
+  async.apply(generateBranch, 'master', 0),
+  async.apply(generateBranch, '0.90', 1)
], function (err) {
  if (err) {
    throw err;
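
The new spawn helper in this hunk pipes child output through event-stream so each line prints indented beneath the echoed command; the same pattern in isolation:

var cp = require('child_process');
var estream = require('event-stream');

// run a command and indent every line of its combined output
function runIndented(cmd, args) {
  var proc = cp.spawn(cmd, args, { stdio: 'pipe' });
  var out = estream.split();   // re-chunk the streams into whole lines
  proc.stdout.pipe(out);
  proc.stderr.pipe(out);
  out
    .pipe(estream.mapSync(function (line) { return '  ' + line + '\n'; }))
    .pipe(process.stdout);
  return proc;
}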


@@ -1,6 +1,4 @@
-var aliases; // defined at the bottom of this file.
-module.exports = function (done) {
+module.exports = function (branch, done) {
  /**
   * Read the API actions from the rest-api-spec repo.
   * @type {[type]}
@@ -17,6 +15,9 @@ module.exports = function (done) {
  var apiSpec; // populated by parseSpecFiles
  var docVars; // slightly modified clone of apiSpec for the docs
+  var branchSuffix = branch === 'master' ? '' : '_' + _.snakeCase(branch);
+  var aliases = require('./aliases' + branchSuffix);

  // generate the API
  async.series([
    readSpecFiles,
@@ -28,9 +29,11 @@ module.exports = function (done) {
  ], done);

  function readSpecFiles(done) {
-    var apiDir = '../../src/elasticsearch/rest-api-spec/api/';
+    var apiDir = require('path').join(__dirname, '../../src/elasticsearch/rest-api-spec/api/');
    files = fs.readdirSync(apiDir).map(function (filename) {
-      return require(apiDir + filename);
+      var module = require(apiDir + filename);
+      delete require.cache[apiDir + filename];
+      return module;
    });
    done();
  }
@@ -68,9 +71,10 @@ module.exports = function (done) {
  }

  function writeApiFile(done) {
-    var outputPath = require('path').join(__dirname, '../../src/lib/api.js');
-    console.log('writing', apiSpec.actions.length, 'api actions to', outputPath);
-    fs.writeFile(outputPath, templates.apiFile(apiSpec), done);
+    var outputPath = require('path').join(__dirname, '../../src/lib/api' + branchSuffix + '.js');
+    fs.writeFileSync(outputPath, templates.apiFile(apiSpec));
+    console.log('wrote', apiSpec.actions.length, 'api actions to', outputPath);
+    done();
  }

  function ensureDocsDir(done) {
@@ -102,7 +106,7 @@ module.exports = function (done) {
  function writeMethodDocs(done) {
    fs.writeFile(
-      '../../docs/api_methods.asciidoc',
+      '../../docs/api_methods' + branchSuffix + '.asciidoc',
      templates.apiMethods(docVars),
      done
    );
@@ -186,7 +190,7 @@ module.exports = function (done) {
    });

    if (urlSignatures.length !== _.unique(urlSignatures).length) {
-      throw new Error('Multiple URLs with the same signature detected for ' + spec.name);
+      throw new Error('Multiple URLs with the same signature detected for ' + spec.name + '\n' + _.pluck(urls, 'fmt').join('\n') + '\n');
    }

    if (urls.length > 1) {
@@ -290,68 +294,3 @@ module.exports = function (done) {
  }
};
-aliases = {
-  'cluster.nodeHotThreads': [
-    '/_cluster/nodes/hotthreads',
-    '/_cluster/nodes/hot_threads',
-    '/_nodes/hot_threads',
-    '/_cluster/nodes/{node_id}/hotthreads',
-    '/_cluster/nodes/{node_id}/hot_threads',
-    '/_nodes/{node_id}/hot_threads'
-  ],
-  'cluster.nodeInfo': [
-    '/_cluster/nodes',
-    '/_nodes/settings',
-    '/_nodes/os',
-    '/_nodes/process',
-    '/_nodes/jvm',
-    '/_nodes/thread_pool',
-    '/_nodes/network',
-    '/_nodes/transport',
-    '/_nodes/http',
-    '/_nodes/plugin',
-    '/_cluster/nodes/{node_id}',
-    '/_nodes/{node_id}/settings',
-    '/_nodes/{node_id}/os',
-    '/_nodes/{node_id}/process',
-    '/_nodes/{node_id}/jvm',
-    '/_nodes/{node_id}/thread_pool',
-    '/_nodes/{node_id}/network',
-    '/_nodes/{node_id}/transport',
-    '/_nodes/{node_id}/http',
-    '/_nodes/{node_id}/plugin'
-  ],
-  'cluster.nodeShutdown': [
-    '/_cluster/nodes/_shutdown'
-  ],
-  'cluster.nodeStats': [
-    '/_cluster/nodes/stats',
-    '/_nodes/stats/{metric_family}',
-    '/_nodes/stats/indices/{metric}/{fields}',
-    '/_cluster/nodes/{node_id}/stats',
-    '/_nodes/{node_id}/stats/{metric_family}',
-    '/_nodes/{node_id}/stats/indices/{metric}/{fields}'
-  ],
-  'get': [
-    '/{index}/{type}/{id}/_source'
-  ],
-  'indices.deleteMapping': [
-    '/{index}/{type}/_mapping'
-  ],
-  'indices.deleteWarmer': [
-    '/{index}/_warmer',
-    '/{index}/_warmers',
-    '/{index}/_warmers/{name}'
-  ],
-  'indices.stats': [
-    '_stats/{metric_family}',
-    '/_stats/indexing',
-    '/_stats/indexing/{indexing_types}',
-    '/_stats/search/{search_groups}',
-    '/_stats/fielddata/{fields}',
-    '/{index}/_stats/{metric_family}',
-    '/{index}/_stats/indexing',
-    '/{index}/_stats/search/{search_groups}',
-    '/{index}/_stats/fielddata/{fields}'
-  ]
-};
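
The readSpecFiles change above deletes each spec module from require.cache right after loading it, which is what lets the second generateBranch pass re-read the same spec paths once the submodule has been switched to the other branch. The same trick in isolation (this variant busts the cache before requiring):

// load a module fresh, ignoring any copy node has already cached
function requireFresh(modulePath) {
  var resolved = require.resolve(modulePath);
  delete require.cache[resolved];
  return require(resolved);
}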


@@ -1,14 +1,16 @@
-module.exports = function (done) {
+module.exports = function (branch, done) {
  /**
   * Creates a JSON version of the YAML test suite that can be simply bundled for use in the browser.
   */
  var jsYaml = require('js-yaml');
  var fs = require('relative-fs').relativeTo(__dirname);
  var async = require('async');
  var _ = require('../../src/lib/utils');
+  var path = require('path');

  var tests = {}; // populated in readYamlTests
+  var branchSuffix = branch === 'master' ? '' : '_' + _.snakeCase(branch);

  // generate the yaml tests
  async.series([
    readYamlTests,
@@ -38,7 +40,7 @@ module.exports = function (done) {
  }

  function writeYamlTests(done) {
-    var testFile = require('path').resolve(__dirname, '../../test/integration/yaml_suite/yaml_tests.json');
+    var testFile = require('path').resolve(__dirname, '../../test/integration/yaml_suite/yaml_tests' + branchSuffix + '.json');
    fs.writeFileSync(testFile, JSON.stringify(tests, null, ' '), 'utf8');
    console.log('wrote YAML tests as JSON to', testFile);
    done();