From 3e66652287593c84ae64b82db4e27d80823dcf9b Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Mon, 5 Jan 2015 15:43:47 -0700 Subject: [PATCH 01/11] save --- grunt/config/esvm.js | 49 +++++++++++++ grunt/config/run.js | 38 ---------- grunt/tasks.js | 8 +- package.json | 3 +- scripts/_utils.sh | 171 ------------------------------------------- scripts/ci.js | 121 ++++++++++++++++++++++++++++++ scripts/ci.sh | 84 ++++++++++----------- scripts/es.sh | 26 ------- scripts/jenkins.sh | 1 - 9 files changed, 219 insertions(+), 282 deletions(-) create mode 100644 grunt/config/esvm.js delete mode 100644 scripts/_utils.sh create mode 100644 scripts/ci.js delete mode 100755 scripts/es.sh diff --git a/grunt/config/esvm.js b/grunt/config/esvm.js new file mode 100644 index 000000000..5e14e2be7 --- /dev/null +++ b/grunt/config/esvm.js @@ -0,0 +1,49 @@ +var utils = require('../utils'); +var _ = require('lodash-node'); + +exports.options = { + nodes: 1, + config: { + 'node.name': 'elasticsearch_js_test_runner', + 'cluster.name': 'elasticsearch_js_test_runners', + 'http.port': 9400, + 'network.host': 'localhost', + 'discovery.zen.ping_timeout': 1, + 'discovery.zen.ping.multicast.enabled': false, + 'logger.level': 'ERROR', + } +}; + +// targets for each branch +utils.branches.forEach(function (branch) { + exports[branch] = { + options: { + branch: branch + } + }; + + switch (branch) { + case '0.90': + case '1.0': + case '1.1': + // no special treatment + break; + default: + exports[branch].options.config = _.merge({ + 'node.bench': true, + 'script.disable_dynamic': false + }, exports.options.config); + + break; + } +}); + +// ci target, based on env variables +var ciVersion = process.env.ES_RELEASE; +var ciBranch = process.env.TESTING_BRANCH; +exports.ci_env = { + options: { + version: ciVersion, + branch: !ciVersion && ciBranch, + } +}; \ No newline at end of file diff --git a/grunt/config/run.js b/grunt/config/run.js index 92dc3afa3..a9f521974 100644 --- a/grunt/config/run.js +++ b/grunt/config/run.js @@ -1,13 +1,3 @@ -var esOpts = [ - '-D es.http.port=9400', - '-D es.network.host=localhost', - '-D es.cluster.name=elasticsearch_js_test_runners', - '-D es.node.name=elasticsearch_js_test_runner', - '-D es.discovery.zen.ping.multicast.enabled=false', - '-D es.discovery.zen.ping_timeout=1', - '-D es.logger.level=ERROR', -]; - var utils = require('../utils'); var config = { @@ -63,37 +53,9 @@ var config = { }; utils.branches.forEach(function (branch) { - config['generate_' + branch] = { exec: 'node ./scripts/generate/index.js --branch=' + branch }; - - config['install_es_' + branch] = { - exec: './scripts/es.sh install ' + branch, - }; - - var args = esOpts.slice(0); - - switch (branch) { - case '0.90': - args.push('-f'); - break; - case '1.0': - case '1.1': - // no special treatment - break; - default: - args.push('-Des.node.bench=true', '-Des.script.disable_dynamic=false'); - break; - } - - config['es_' + branch] = { - exec: './.snapshots/' + branch + '_nightly/bin/elasticsearch ' + args.join(' '), - options: { - wait: false, - quiet: true - } - }; }); module.exports = config; \ No newline at end of file diff --git a/grunt/tasks.js b/grunt/tasks.js index 10ae320a8..d44fa612a 100644 --- a/grunt/tasks.js +++ b/grunt/tasks.js @@ -21,10 +21,9 @@ module.exports = function (grunt) { branches.forEach(function (branch) { tasks.push( - 'run:install_es_' + branch, - 'run:es_' + branch, + 'esvm:' + branch, 'mochacov:integration_' + branch, - 'stop:es_' + branch + 'esvm_shutdown:' + branch ); }); @@ -72,6 +71,7 @@ 
module.exports = function (grunt) { writeFile(browserBuildsPath, browserBuilds), writeFile(packagePath, JSON.stringify(pkg, null, ' ')) ]); - }).nodeify(this.async()); + }) + .nodeify(this.async()); }); }; \ No newline at end of file diff --git a/package.json b/package.json index b2c0a4f64..c364cec3a 100644 --- a/package.json +++ b/package.json @@ -52,6 +52,7 @@ "grunt-contrib-jshint": "spenceralger/grunt-contrib-jshint", "grunt-contrib-uglify": "~0.2.7", "grunt-contrib-watch": "~0.5.3", + "grunt-esvm": "~0.2.1", "grunt-mocha-cov": "~0.2.0", "grunt-open": "~0.2.2", "grunt-prompt": "~0.1.2", @@ -91,4 +92,4 @@ "engines": { "node": ">=0.8 <0.11" } -} \ No newline at end of file +} diff --git a/scripts/_utils.sh b/scripts/_utils.sh deleted file mode 100644 index 0101223ba..000000000 --- a/scripts/_utils.sh +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env bash - -##### -# Start a group of log output -##### -function group { - style='\x1b[1m\x1b[37m\x1b[4m' - reset='\x1b[24m\x1b[39m\x1b[22m' - - echo -en "\n\n${style}${1}${reset}\n" -} - -##### -# Do, log, and check a call -##### -function call { - local DO="$*" - echo "\$ ${DO}" - echo "$DO" | bash - local RESULT=$? - if [ "$RESULT" -gt "0" ]; then - echo "non-zero exit code: $RESULT" - exit $RESULT - fi -} - -function ensure_grunt { - if [[ ! -x "$(which grunt)" ]]; then - group "installing grunt" - call npm install --silent -g grunt-cli - fi -} - -##### -# call grunt, but make sure it's installed first -##### -function _grunt { - ensure_grunt - call grunt "$*" -} - -##### -# Download a version of ES and get it running -# @arg ES_BRANCH - The branch to run off of -# @arg ES_RELEASE - The specific release to run, overrides ES_BRANCH -##### -function manage_es { - local DO=$1 - local ES_BRANCH=$2 - local ES_RELEASE=$3 - - local ROOT="$PWD" - local SNAPSHOTS="$ROOT/.snapshots" - local PIDS="$ROOT/.snapshots/pids" - - group "${DO}ing es" - - if [ ! -d "$PIDS" ]; then - call mkdir -p "$PIDS" - fi - - if [ -n "$ES_RELEASE" ]; then - local ES_VERSION="v${ES_RELEASE}" - local ES_URL="https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ES_RELEASE}.zip" - local ES_DIR="${SNAPSHOTS}/${ES_VERSION}" - else - local BUCKET='s3-us-west-2.amazonaws.com/build.elasticsearch.org' - - # TODO: TRASH THIS - if [[ $ES_BRANCH == "1.x" ]]; then local JDK='JDK7' - elif [[ $ES_BRANCH == "1.2" ]]; then local JDK='JDK7' - elif [[ $ES_BRANCH == "1.3" ]]; then local JDK='JDK7' - elif [[ $ES_BRANCH == "1.4" || $ES_BRANCH == "master" ]]; then - local JDK='JDK7' - local BUCKET='s3-eu-west-1.amazonaws.com/build-eu.elasticsearch.org' - else local JDK='JDK6' - fi - - local ES_VERSION="${ES_BRANCH}_nightly" - local ES_URL="http://$BUCKET/origin/$ES_BRANCH/nightly/$JDK/elasticsearch-latest-SNAPSHOT.zip" - local DATE="$(date +%Y_%m_%d)" - local ES_DIR="${SNAPSHOTS}/${ES_VERSION}_${DATE}" - fi - - local ES_BIN="$ES_DIR/bin/elasticsearch" - local PIDFILE="$ROOT/.snapshots/pids/$ES_VERSION" - - - case "$DO" in - reinstall) - if [ -x "$ES_BIN" ]; then - echo "removing $ES_VERSION" - rm -rf "${SNAPSHOTS}/${ES_VERSION}*" - fi - manage_es install "$ES_BRANCH" "$ES_RELEASE" - ;; - install) - if [ ! 
-x "$ES_BIN" ]; then - echo "Downloading Elasticsearch $ES_VERSION" - rm -rf "${SNAPSHOTS}/${ES_VERSION}*" - call curl --silent -O "$ES_URL" - unzip -q elasticsearch-*.zip - rm elasticsearch-*.zip - mv elasticsearch-*/ "$ES_DIR" - if [ -z "$ES_RELEASE" ]; then - ln -sf "$ES_DIR" "${SNAPSHOTS}/${ES_VERSION}" - fi - else - echo "$ES_VERSION installed" - fi - ;; - start) - # ensure that only one version is running at a time so that we can precisely kill them - if [ -f "$PIDFILE" ]; then - local PID="$(cat "$PIDFILE")" - kill -0 "$PID" - local RUNNING=$? - - if [ $RUNNING -eq 0 ]; then - echo "Already running $ES_VERSION" - return 1 - else - echo "PID file was left behind by ES" - rm "$PIDFILE" - fi - fi - - manage_es install "$ES_BRANCH" "$ES_RELEASE" - - if [ ! -x "$ES_BIN" ]; then - echo "Unable to find elasticsearch executable" - return 1 - fi - - local ES_OPTS="-p $PIDFILE -D es.http.port=9400 -D es.network.host=localhost -D es.cluster.name=elasticsearch_js_test_runners -D es.node.name=elasticsearch_js_test_runner -D es.discovery.zen.ping.multicast.enabled=false -D es.discovery.zen.ping_timeout=1 -D es.logger.level=ERROR" - - if [ -n "$ES_NODE_NAME" ]; then - ES_OPTS="$ES_OPTS -Des.node.name=$ES_NODE_NAME" - fi - - if [[ $ES_BRANCH != "0.90" ]]; then - # explicitly run as deamon - ES_OPTS="-d $ES_OPTS" - fi - - if [[ $ES_BRANCH != "1.0" && $ES_BRANCH != "1.1" ]]; then - # enable scripting and benchmarks - ES_OPTS="$ES_OPTS -D es.node.bench=true -D es.script.disable_dynamic=false" - fi - - call "$ES_BIN" "$ES_OPTS" - ;; - stop) - if [ -e "$PIDFILE" ]; then - local PID="$(cat "$PIDFILE")" - kill -0 "$PID" - local RUNNING=$? - - if [ $RUNNING -eq 0 ]; then - kill "$PID" - echo "Elasticsearch $ES_VERSION stopped" - return 0 - fi - - rm "$PIDFILE" - fi - echo "Elasticsearch $ES_VERSION is not running." - return 1 - ;; - esac -} \ No newline at end of file diff --git a/scripts/ci.js b/scripts/ci.js new file mode 100644 index 000000000..9b64180f9 --- /dev/null +++ b/scripts/ci.js @@ -0,0 +1,121 @@ +#!/usr/bin/env node + +/** + * Run the tests, and setup es if needed + * + * ENV VARS: + * ES_BRANCH - the ES branch we should use to generate the tests and download es + * ES_RELEASE - a specific ES release to download in use for testing + * NODE_UNIT=1 - 0/1 run the unit tests in node + * NODE_INTEGRATION=1 - 0/1 run the integration tests in node + * BROWSER_UNIT - the browser to test in using, sauce labs. 
One of 'ie', 'firefox', 'chrome' + * COVERAGE - 0/1 check for coverage and ship it to coveralls + *******/ + +var Promise = require('bluebird'); +var _ = require('lodash-node'); +var join = require('path').join; +var fs = require('fs'); +var child_process = require('child_process'); + +var ROOT = join(__dirname, '..'); +var GRUNT = join(ROOT, './node_modules/.bin/grunt'); +var MOCHA = join(ROOT, './node_modules/.bin/mocha'); +var BRANCH = process.env.ES_BRANCH || 'master'; +var MOCHA_REPORTER = 'test/utils/jenkins-reporter.js'; + +var JENKINS = !!process.env.JENKINS; +var NODE_UNIT = process.env.NODE_UNIT !== '0'; +var NODE_INTEGRATION = process.env.NODE_UNIT !== '0'; +var BROWSER_UNIT = process.env.NODE_UNIT === '1'; +var COVERAGE = process.env.NODE_UNIT === '1'; + +function spawn(file, args, opts, block) { + return new Promise(function (resolve, reject) { + var cp = child_process.spawn(GRUNT, args, _.defaults(opts || {}, { + cwd: ROOT, + env: process.env, + stdio: 'inherit' + })); + + block && block(cp); + + cp.on('exit', function (code) { + if (code > 1) { + reject(new Error('non-zero exit code: ' + code)); + } else { + resolve(); + } + }); + }); +} + +function node(/*args... */) { + return spawn('node', _.rest(arguments)); +} + +function grunt(/* args... */) { + return spawn(GRUNT, _.rest(arguments)); +} + +function mocha(report/*, args... */) { + return spawn(MOCHA, _.rest(arguments, 1), { stdio: [0, 1, 'pipe'] }, function (cp) { + cp.stderr.pipe(fs.createWriteStream(report)); + }); +} + +var chain = Promise.resolve(); + +if (NODE_UNIT && !JENKINS) { + chain = chain.then(function () { + return grunt('jshint', 'mochacov:unit'); + }); +} + + +if (NODE_UNIT && JENKINS) { + chain = chain.then(function () { + var report = join(ROOT, 'test/junit-node-unit.xml'); + var tests = join(ROOT, 'test/unit/index.js'); + + return mocha(report, tests, '--reporter', join(ROOT, MOCHA_REPORTER)); + }); +} + +if (NODE_INTEGRATION) { + chain = chain.then(function () { + return node('scripts/generate.js', '--no-api', '--branch', BRANCH); + }); +} + +if (NODE_INTEGRATION && !JENKINS) { + chain = chain.then(function () { + grunt('esvm:ci_env', 'mochacov:integration_' + BRANCH, 'esvm_shutdown:ci_env'); + }); +} + +if (NODE_INTEGRATION && JENKINS) { + chain = chain.then(function () { + var branchSuffix = '_' + BRANCH.replace(/\./g, '_'); + var tests = 'test/integration/yaml_suite/index' + branchSuffix + '.js'; + var esPort = process.env.es_port || 9200; + var report = 'test/junit-node-integration.xml'; + + return mocha(report, tests, '--host', 'localhost', '--port', esPort, '--reporter', MOCHA_REPORTER); + }); +} + +if (BROWSER_UNIT) { + chain = chain.then(function () { + return grunt('browser_clients:build', 'run:browser_test_server', 'saucelabs-mocha'); + }); +} + +if (COVERAGE) { + chain = chain.then(function () { + return grunt('mochacov:ship_coverage'); + }) + .catch(function () { + console.log('FAILED TO SHIP COVERAGE! but that\'s normal'); + }); +} \ No newline at end of file diff --git a/scripts/ci.sh b/scripts/ci.sh index b19e00faf..45dc341f6 100755 --- a/scripts/ci.sh +++ b/scripts/ci.sh @@ -13,13 +13,22 @@ # ########### -export ES_NODE_NAME="elasticsearch_js_test_runner" - -HERE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" MOCHA="./node_modules/.bin/mocha" MOCHA_REPORTER="../../../test/utils/jenkins-reporter.js" -source $HERE/_utils.sh +# execute a command, and exit if it fails +function crit { + $* + CODE=$? 
+ if [[ $CODE -gt 0 ]]; then + echo "last command was critical, but it reported non-zero exit code $CODE"; + exit; + fi +} + +if [[ "$(which grunt)" == "" ]]; then + crit npm install -g grunt +fi # normalize ES_BRANCH into TESTING_BRANCH if [[ -n "$ES_BRANCH" ]]; then @@ -29,55 +38,48 @@ else fi if [[ "$NODE_UNIT" != "0" ]]; then - group "running unit tests" - if [[ -n "$JENKINS" ]]; then - $MOCHA test/unit/index.js --reporter $MOCHA_REPORTER 2> test/junit-node-unit.xml - if [ "$?" -gt "0" ]; then - echo "non-zero exit code: $RESULT" - cat test/junit-node-unit.xml - fi - else - _grunt jshint mochacov:unit + if [[ -n "$JENKINS" ]]; then + $MOCHA test/unit/index.js --reporter $MOCHA_REPORTER 2> test/junit-node-unit.xml + if [ "$?" -gt "0" ]; then + echo "non-zero exit code: $RESULT" + cat test/junit-node-unit.xml fi + else + crit grunt jshint mochacov:unit + fi fi if [[ "$NODE_INTEGRATION" != "0" ]]; then - group "generating tests" - call node scripts/generate --no-api --branch $TESTING_BRANCH + crit node scripts/generate --no-api --branch $TESTING_BRANCH - group "running integration tests" - if [[ -n "$JENKINS" ]]; then - # convert TESTING_BRANCH into BRANCH_SUFFIX - BRANCH_SUFFIX="_${TESTING_BRANCH//./_}" + if [[ -n "$JENKINS" ]]; then + # convert TESTING_BRANCH into BRANCH_SUFFIX + BRANCH_SUFFIX="_${TESTING_BRANCH//./_}" - # find value of ES_PORT - if [[ -n "$es_port" ]]; then - # jenkins - ES_PORT=$es_port - else - ES_PORT=9200 - fi - - FILES=test/integration/yaml_suite/index${BRANCH_SUFFIX}.js - $MOCHA $FILES --host localhost --port $ES_PORT --reporter $MOCHA_REPORTER 2> test/junit-node-integration.xml - if [ "$?" -gt "0" ]; then - echo "non-zero exit code: $RESULT" - cat test/junit-node-unit.xml - fi + # find value of ES_PORT + if [[ -n "$es_port" ]]; then + # jenkins + ES_PORT=$es_port else - manage_es start $TESTING_BRANCH $ES_RELEASE - _grunt mochacov:integration_$TESTING_BRANCH - manage_es stop $TESTING_BRANCH $ES_RELEASE + ES_PORT=9200 fi + + FILES=test/integration/yaml_suite/index${BRANCH_SUFFIX}.js + $MOCHA $FILES --host localhost --port $ES_PORT --reporter $MOCHA_REPORTER 2> test/junit-node-integration.xml + if [ "$?" -gt "0" ]; then + echo "non-zero exit code: $RESULT" + cat test/junit-node-unit.xml + fi + else + crit grunt esvm:ci_env "mochacov:integration_${TESTING_BRANCH}" esvm_shutdown:ci_env + fi fi if [[ "$BROWSER_UNIT" == "1" ]]; then - group "running browser tests" - _grunt browser_clients:build run:browser_test_server saucelabs-mocha + crit grunt browser_clients:build run:browser_test_server saucelabs-mocha fi if [[ "$COVERAGE" == "1" ]]; then - group "shipping coverage" - # don't fail even if this does - _grunt --force mochacov:ship_coverage + # don't fail even if this does + grunt --force mochacov:ship_coverage fi diff --git a/scripts/es.sh b/scripts/es.sh deleted file mode 100755 index 602cff05a..000000000 --- a/scripts/es.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env bash - -if [ -z "$2" ]; then - echo "Usage: -From the root of the elasticsearch-js project call: - -Start nightly: - ./scripts/es.sh start master - -Stop 0.90 branch: - ./scripts/es.sh stop 0.90 - -Start relase version 0.90.7: - ./scripts/es.sh start 0.90 0.90.7 -" - exit 1 -fi - -source scripts/_utils.sh - -if [[ -z "$ES_NODE_NAME" ]]; then - export ES_NODE_NAME="elasticsearch_js_test_runner" -fi - -manage_es $* -exit $? 
\ No newline at end of file diff --git a/scripts/jenkins.sh b/scripts/jenkins.sh index 1ec203b15..180c3e84f 100755 --- a/scripts/jenkins.sh +++ b/scripts/jenkins.sh @@ -1,6 +1,5 @@ #!/usr/bin/env bash -export VERBOSE="true" export JENKINS="true" HERE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" From 6ac998d55074571fb601b623ec0ef3552feea905 Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Mon, 5 Jan 2015 16:08:22 -0700 Subject: [PATCH 02/11] Revert "save" This reverts commit 3e66652287593c84ae64b82db4e27d80823dcf9b. --- grunt/config/esvm.js | 49 ------------- grunt/config/run.js | 38 ++++++++++ grunt/tasks.js | 8 +- package.json | 3 +- scripts/_utils.sh | 171 +++++++++++++++++++++++++++++++++++++++++++ scripts/ci.js | 121 ------------------------------ scripts/ci.sh | 84 +++++++++++---------- scripts/es.sh | 26 +++++++ scripts/jenkins.sh | 1 + 9 files changed, 282 insertions(+), 219 deletions(-) delete mode 100644 grunt/config/esvm.js create mode 100644 scripts/_utils.sh delete mode 100644 scripts/ci.js create mode 100755 scripts/es.sh diff --git a/grunt/config/esvm.js b/grunt/config/esvm.js deleted file mode 100644 index 5e14e2be7..000000000 --- a/grunt/config/esvm.js +++ /dev/null @@ -1,49 +0,0 @@ -var utils = require('../utils'); -var _ = require('lodash-node'); - -exports.options = { - nodes: 1, - config: { - 'node.name': 'elasticsearch_js_test_runner', - 'cluster.name': 'elasticsearch_js_test_runners', - 'http.port': 9400, - 'network.host': 'localhost', - 'discovery.zen.ping_timeout': 1, - 'discovery.zen.ping.multicast.enabled': false, - 'logger.level': 'ERROR', - } -}; - -// targets for each branch -utils.branches.forEach(function (branch) { - exports[branch] = { - options: { - branch: branch - } - }; - - switch (branch) { - case '0.90': - case '1.0': - case '1.1': - // no special treatment - break; - default: - exports[branch].options.config = _.merge({ - 'node.bench': true, - 'script.disable_dynamic': false - }, exports.options.config); - - break; - } -}); - -// ci target, based on env variables -var ciVersion = process.env.ES_RELEASE; -var ciBranch = process.env.TESTING_BRANCH; -exports.ci_env = { - options: { - version: ciVersion, - branch: !ciVersion && ciBranch, - } -}; \ No newline at end of file diff --git a/grunt/config/run.js b/grunt/config/run.js index a9f521974..92dc3afa3 100644 --- a/grunt/config/run.js +++ b/grunt/config/run.js @@ -1,3 +1,13 @@ +var esOpts = [ + '-D es.http.port=9400', + '-D es.network.host=localhost', + '-D es.cluster.name=elasticsearch_js_test_runners', + '-D es.node.name=elasticsearch_js_test_runner', + '-D es.discovery.zen.ping.multicast.enabled=false', + '-D es.discovery.zen.ping_timeout=1', + '-D es.logger.level=ERROR', +]; + var utils = require('../utils'); var config = { @@ -53,9 +63,37 @@ var config = { }; utils.branches.forEach(function (branch) { + config['generate_' + branch] = { exec: 'node ./scripts/generate/index.js --branch=' + branch }; + + config['install_es_' + branch] = { + exec: './scripts/es.sh install ' + branch, + }; + + var args = esOpts.slice(0); + + switch (branch) { + case '0.90': + args.push('-f'); + break; + case '1.0': + case '1.1': + // no special treatment + break; + default: + args.push('-Des.node.bench=true', '-Des.script.disable_dynamic=false'); + break; + } + + config['es_' + branch] = { + exec: './.snapshots/' + branch + '_nightly/bin/elasticsearch ' + args.join(' '), + options: { + wait: false, + quiet: true + } + }; }); module.exports = config; \ No newline at end of file diff --git a/grunt/tasks.js 
b/grunt/tasks.js index d44fa612a..10ae320a8 100644 --- a/grunt/tasks.js +++ b/grunt/tasks.js @@ -21,9 +21,10 @@ module.exports = function (grunt) { branches.forEach(function (branch) { tasks.push( - 'esvm:' + branch, + 'run:install_es_' + branch, + 'run:es_' + branch, 'mochacov:integration_' + branch, - 'esvm_shutdown:' + branch + 'stop:es_' + branch ); }); @@ -71,7 +72,6 @@ module.exports = function (grunt) { writeFile(browserBuildsPath, browserBuilds), writeFile(packagePath, JSON.stringify(pkg, null, ' ')) ]); - }) - .nodeify(this.async()); + }).nodeify(this.async()); }); }; \ No newline at end of file diff --git a/package.json b/package.json index c364cec3a..b2c0a4f64 100644 --- a/package.json +++ b/package.json @@ -52,7 +52,6 @@ "grunt-contrib-jshint": "spenceralger/grunt-contrib-jshint", "grunt-contrib-uglify": "~0.2.7", "grunt-contrib-watch": "~0.5.3", - "grunt-esvm": "~0.2.1", "grunt-mocha-cov": "~0.2.0", "grunt-open": "~0.2.2", "grunt-prompt": "~0.1.2", @@ -92,4 +91,4 @@ "engines": { "node": ">=0.8 <0.11" } -} +} \ No newline at end of file diff --git a/scripts/_utils.sh b/scripts/_utils.sh new file mode 100644 index 000000000..0101223ba --- /dev/null +++ b/scripts/_utils.sh @@ -0,0 +1,171 @@ +#!/usr/bin/env bash + +##### +# Start a group of log output +##### +function group { + style='\x1b[1m\x1b[37m\x1b[4m' + reset='\x1b[24m\x1b[39m\x1b[22m' + + echo -en "\n\n${style}${1}${reset}\n" +} + +##### +# Do, log, and check a call +##### +function call { + local DO="$*" + echo "\$ ${DO}" + echo "$DO" | bash + local RESULT=$? + if [ "$RESULT" -gt "0" ]; then + echo "non-zero exit code: $RESULT" + exit $RESULT + fi +} + +function ensure_grunt { + if [[ ! -x "$(which grunt)" ]]; then + group "installing grunt" + call npm install --silent -g grunt-cli + fi +} + +##### +# call grunt, but make sure it's installed first +##### +function _grunt { + ensure_grunt + call grunt "$*" +} + +##### +# Download a version of ES and get it running +# @arg ES_BRANCH - The branch to run off of +# @arg ES_RELEASE - The specific release to run, overrides ES_BRANCH +##### +function manage_es { + local DO=$1 + local ES_BRANCH=$2 + local ES_RELEASE=$3 + + local ROOT="$PWD" + local SNAPSHOTS="$ROOT/.snapshots" + local PIDS="$ROOT/.snapshots/pids" + + group "${DO}ing es" + + if [ ! -d "$PIDS" ]; then + call mkdir -p "$PIDS" + fi + + if [ -n "$ES_RELEASE" ]; then + local ES_VERSION="v${ES_RELEASE}" + local ES_URL="https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ES_RELEASE}.zip" + local ES_DIR="${SNAPSHOTS}/${ES_VERSION}" + else + local BUCKET='s3-us-west-2.amazonaws.com/build.elasticsearch.org' + + # TODO: TRASH THIS + if [[ $ES_BRANCH == "1.x" ]]; then local JDK='JDK7' + elif [[ $ES_BRANCH == "1.2" ]]; then local JDK='JDK7' + elif [[ $ES_BRANCH == "1.3" ]]; then local JDK='JDK7' + elif [[ $ES_BRANCH == "1.4" || $ES_BRANCH == "master" ]]; then + local JDK='JDK7' + local BUCKET='s3-eu-west-1.amazonaws.com/build-eu.elasticsearch.org' + else local JDK='JDK6' + fi + + local ES_VERSION="${ES_BRANCH}_nightly" + local ES_URL="http://$BUCKET/origin/$ES_BRANCH/nightly/$JDK/elasticsearch-latest-SNAPSHOT.zip" + local DATE="$(date +%Y_%m_%d)" + local ES_DIR="${SNAPSHOTS}/${ES_VERSION}_${DATE}" + fi + + local ES_BIN="$ES_DIR/bin/elasticsearch" + local PIDFILE="$ROOT/.snapshots/pids/$ES_VERSION" + + + case "$DO" in + reinstall) + if [ -x "$ES_BIN" ]; then + echo "removing $ES_VERSION" + rm -rf "${SNAPSHOTS}/${ES_VERSION}*" + fi + manage_es install "$ES_BRANCH" "$ES_RELEASE" + ;; + install) + if [ ! 
-x "$ES_BIN" ]; then + echo "Downloading Elasticsearch $ES_VERSION" + rm -rf "${SNAPSHOTS}/${ES_VERSION}*" + call curl --silent -O "$ES_URL" + unzip -q elasticsearch-*.zip + rm elasticsearch-*.zip + mv elasticsearch-*/ "$ES_DIR" + if [ -z "$ES_RELEASE" ]; then + ln -sf "$ES_DIR" "${SNAPSHOTS}/${ES_VERSION}" + fi + else + echo "$ES_VERSION installed" + fi + ;; + start) + # ensure that only one version is running at a time so that we can precisely kill them + if [ -f "$PIDFILE" ]; then + local PID="$(cat "$PIDFILE")" + kill -0 "$PID" + local RUNNING=$? + + if [ $RUNNING -eq 0 ]; then + echo "Already running $ES_VERSION" + return 1 + else + echo "PID file was left behind by ES" + rm "$PIDFILE" + fi + fi + + manage_es install "$ES_BRANCH" "$ES_RELEASE" + + if [ ! -x "$ES_BIN" ]; then + echo "Unable to find elasticsearch executable" + return 1 + fi + + local ES_OPTS="-p $PIDFILE -D es.http.port=9400 -D es.network.host=localhost -D es.cluster.name=elasticsearch_js_test_runners -D es.node.name=elasticsearch_js_test_runner -D es.discovery.zen.ping.multicast.enabled=false -D es.discovery.zen.ping_timeout=1 -D es.logger.level=ERROR" + + if [ -n "$ES_NODE_NAME" ]; then + ES_OPTS="$ES_OPTS -Des.node.name=$ES_NODE_NAME" + fi + + if [[ $ES_BRANCH != "0.90" ]]; then + # explicitly run as deamon + ES_OPTS="-d $ES_OPTS" + fi + + if [[ $ES_BRANCH != "1.0" && $ES_BRANCH != "1.1" ]]; then + # enable scripting and benchmarks + ES_OPTS="$ES_OPTS -D es.node.bench=true -D es.script.disable_dynamic=false" + fi + + call "$ES_BIN" "$ES_OPTS" + ;; + stop) + if [ -e "$PIDFILE" ]; then + local PID="$(cat "$PIDFILE")" + kill -0 "$PID" + local RUNNING=$? + + if [ $RUNNING -eq 0 ]; then + kill "$PID" + echo "Elasticsearch $ES_VERSION stopped" + return 0 + fi + + rm "$PIDFILE" + fi + echo "Elasticsearch $ES_VERSION is not running." + return 1 + ;; + esac +} \ No newline at end of file diff --git a/scripts/ci.js b/scripts/ci.js deleted file mode 100644 index 9b64180f9..000000000 --- a/scripts/ci.js +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env node - -/** - * Run the tests, and setup es if needed - * - * ENV VARS: - * ES_BRANCH - the ES branch we should use to generate the tests and download es - * ES_RELEASE - a specific ES release to download in use for testing - * NODE_UNIT=1 - 0/1 run the unit tests in node - * NODE_INTEGRATION=1 - 0/1 run the integration tests in node - * BROWSER_UNIT - the browser to test in using, sauce labs. 
One of 'ie', 'firefox', 'chrome' - * COVERAGE - 0/1 check for coverage and ship it to coveralls - *******/ - -var Promise = require('bluebird'); -var _ = require('lodash-node'); -var join = require('path').join; -var fs = require('fs'); -var child_process = require('child_process'); - -var ROOT = join(__dirname, '..'); -var GRUNT = join(ROOT, './node_modules/.bin/grunt'); -var MOCHA = join(ROOT, './node_modules/.bin/mocha'); -var BRANCH = process.env.ES_BRANCH || 'master'; -var MOCHA_REPORTER = 'test/utils/jenkins-reporter.js'; - -var JENKINS = !!process.env.JENKINS; -var NODE_UNIT = process.env.NODE_UNIT !== '0'; -var NODE_INTEGRATION = process.env.NODE_UNIT !== '0'; -var BROWSER_UNIT = process.env.NODE_UNIT === '1'; -var COVERAGE = process.env.NODE_UNIT === '1'; - -function spawn(file, args, opts, block) { - return new Promise(function (resolve, reject) { - var cp = child_process.spawn(GRUNT, args, _.defaults(opts || {}, { - cwd: ROOT, - env: process.env, - stdio: 'inherit' - })); - - block && block(cp); - - cp.on('exit', function (code) { - if (code > 1) { - reject(new Error('non-zero exit code: ' + code)); - } else { - resolve(); - } - }); - }); -} - -function node(/*args... */) { - return spawn('node', _.rest(arguments)); -} - -function grunt(/* args... */) { - return spawn(GRUNT, _.rest(arguments)); -} - -function mocha(report/*, args... */) { - return spawn(MOCHA, _.rest(arguments, 1), { stdio: [0, 1, 'pipe'] }, function (cp) { - cp.stderr.pipe(fs.createWriteStream(report)); - }); -} - -var chain = Promise.resolve(); - -if (NODE_UNIT && !JENKINS) { - chain = chain.then(function () { - return grunt('jshint', 'mochacov:unit'); - }); -} - - -if (NODE_UNIT && JENKINS) { - chain = chain.then(function () { - var report = join(ROOT, 'test/junit-node-unit.xml'); - var tests = join(ROOT, 'test/unit/index.js'); - - return mocha(report, tests, '--reporter', join(ROOT, MOCHA_REPORTER)); - }); -} - -if (NODE_INTEGRATION) { - chain = chain.then(function () { - return node('scripts/generate.js', '--no-api', '--branch', BRANCH); - }); -} - -if (NODE_INTEGRATION && !JENKINS) { - chain = chain.then(function () { - grunt('esvm:ci_env', 'mochacov:integration_' + BRANCH, 'esvm_shutdown:ci_env'); - }); -} - -if (NODE_INTEGRATION && JENKINS) { - chain = chain.then(function () { - var branchSuffix = '_' + BRANCH.replace(/\./g, '_'); - var tests = 'test/integration/yaml_suite/index' + branchSuffix + '.js'; - var esPort = process.env.es_port || 9200; - var report = 'test/junit-node-integration.xml'; - - return mocha(report, tests, '--host', 'localhost', '--port', esPort, '--reporter', MOCHA_REPORTER); - }); -} - -if (BROWSER_UNIT) { - chain = chain.then(function () { - return grunt('browser_clients:build', 'run:browser_test_server', 'saucelabs-mocha'); - }); -} - -if (COVERAGE) { - chain = chain.then(function () { - return grunt('mochacov:ship_coverage'); - }) - .catch(function () { - console.log('FAILED TO SHIP COVERAGE! but that\'s normal'); - }); -} \ No newline at end of file diff --git a/scripts/ci.sh b/scripts/ci.sh index 45dc341f6..b19e00faf 100755 --- a/scripts/ci.sh +++ b/scripts/ci.sh @@ -13,22 +13,13 @@ # ########### +export ES_NODE_NAME="elasticsearch_js_test_runner" + +HERE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" MOCHA="./node_modules/.bin/mocha" MOCHA_REPORTER="../../../test/utils/jenkins-reporter.js" -# execute a command, and exit if it fails -function crit { - $* - CODE=$? 
- if [[ $CODE -gt 0 ]]; then - echo "last command was critical, but it reported non-zero exit code $CODE"; - exit; - fi -} - -if [[ "$(which grunt)" == "" ]]; then - crit npm install -g grunt -fi +source $HERE/_utils.sh # normalize ES_BRANCH into TESTING_BRANCH if [[ -n "$ES_BRANCH" ]]; then @@ -38,48 +29,55 @@ else fi if [[ "$NODE_UNIT" != "0" ]]; then - if [[ -n "$JENKINS" ]]; then - $MOCHA test/unit/index.js --reporter $MOCHA_REPORTER 2> test/junit-node-unit.xml - if [ "$?" -gt "0" ]; then - echo "non-zero exit code: $RESULT" - cat test/junit-node-unit.xml + group "running unit tests" + if [[ -n "$JENKINS" ]]; then + $MOCHA test/unit/index.js --reporter $MOCHA_REPORTER 2> test/junit-node-unit.xml + if [ "$?" -gt "0" ]; then + echo "non-zero exit code: $RESULT" + cat test/junit-node-unit.xml + fi + else + _grunt jshint mochacov:unit fi - else - crit grunt jshint mochacov:unit - fi fi if [[ "$NODE_INTEGRATION" != "0" ]]; then - crit node scripts/generate --no-api --branch $TESTING_BRANCH + group "generating tests" + call node scripts/generate --no-api --branch $TESTING_BRANCH - if [[ -n "$JENKINS" ]]; then - # convert TESTING_BRANCH into BRANCH_SUFFIX - BRANCH_SUFFIX="_${TESTING_BRANCH//./_}" + group "running integration tests" + if [[ -n "$JENKINS" ]]; then + # convert TESTING_BRANCH into BRANCH_SUFFIX + BRANCH_SUFFIX="_${TESTING_BRANCH//./_}" - # find value of ES_PORT - if [[ -n "$es_port" ]]; then - # jenkins - ES_PORT=$es_port + # find value of ES_PORT + if [[ -n "$es_port" ]]; then + # jenkins + ES_PORT=$es_port + else + ES_PORT=9200 + fi + + FILES=test/integration/yaml_suite/index${BRANCH_SUFFIX}.js + $MOCHA $FILES --host localhost --port $ES_PORT --reporter $MOCHA_REPORTER 2> test/junit-node-integration.xml + if [ "$?" -gt "0" ]; then + echo "non-zero exit code: $RESULT" + cat test/junit-node-unit.xml + fi else - ES_PORT=9200 + manage_es start $TESTING_BRANCH $ES_RELEASE + _grunt mochacov:integration_$TESTING_BRANCH + manage_es stop $TESTING_BRANCH $ES_RELEASE fi - - FILES=test/integration/yaml_suite/index${BRANCH_SUFFIX}.js - $MOCHA $FILES --host localhost --port $ES_PORT --reporter $MOCHA_REPORTER 2> test/junit-node-integration.xml - if [ "$?" -gt "0" ]; then - echo "non-zero exit code: $RESULT" - cat test/junit-node-unit.xml - fi - else - crit grunt esvm:ci_env "mochacov:integration_${TESTING_BRANCH}" esvm_shutdown:ci_env - fi fi if [[ "$BROWSER_UNIT" == "1" ]]; then - crit grunt browser_clients:build run:browser_test_server saucelabs-mocha + group "running browser tests" + _grunt browser_clients:build run:browser_test_server saucelabs-mocha fi if [[ "$COVERAGE" == "1" ]]; then - # don't fail even if this does - grunt --force mochacov:ship_coverage + group "shipping coverage" + # don't fail even if this does + _grunt --force mochacov:ship_coverage fi diff --git a/scripts/es.sh b/scripts/es.sh new file mode 100755 index 000000000..602cff05a --- /dev/null +++ b/scripts/es.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +if [ -z "$2" ]; then + echo "Usage: +From the root of the elasticsearch-js project call: + +Start nightly: + ./scripts/es.sh start master + +Stop 0.90 branch: + ./scripts/es.sh stop 0.90 + +Start relase version 0.90.7: + ./scripts/es.sh start 0.90 0.90.7 +" + exit 1 +fi + +source scripts/_utils.sh + +if [[ -z "$ES_NODE_NAME" ]]; then + export ES_NODE_NAME="elasticsearch_js_test_runner" +fi + +manage_es $* +exit $? 
\ No newline at end of file diff --git a/scripts/jenkins.sh b/scripts/jenkins.sh index 180c3e84f..1ec203b15 100755 --- a/scripts/jenkins.sh +++ b/scripts/jenkins.sh @@ -1,5 +1,6 @@ #!/usr/bin/env bash +export VERBOSE="true" export JENKINS="true" HERE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" From 3e5df1b6addcdbb32fad36332366edce80689bc5 Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Tue, 6 Jan 2015 07:18:22 -0700 Subject: [PATCH 03/11] added exceptions for HTTPS/shield related statuses --- src/lib/errors.js | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/src/lib/errors.js b/src/lib/errors.js index 728c1f64c..dfc99c0f1 100644 --- a/src/lib/errors.js +++ b/src/lib/errors.js @@ -80,19 +80,19 @@ _.inherits(errors.RequestTypeError, ErrorAbstract); var statusCodes = { /** - * Service Unavailable + * ServiceUnavailable * @param {String} [msg] - An error message that will probably end up in a log. */ 503: 'Service Unavailable', /** - * Internal Server Error + * InternalServerError * @param {String} [msg] - An error message that will probably end up in a log. */ 500: 'Internal Server Error', /** - * Precondition Failed + * PreconditionFailed * @param {String} [msg] - An error message that will probably end up in a log. */ 412: 'Precondition Failed', @@ -104,25 +104,31 @@ var statusCodes = { 409: 'Conflict', /** - * Forbidden + * AuthorizationException * @param {String} [msg] - An error message that will probably end up in a log. */ - 403: 'Forbidden', + 403: 'Authorization Exception', /** - * Not Found + * NotFound * @param {String} [msg] - An error message that will probably end up in a log. */ 404: 'Not Found', /** - * Bad Request + * AuthenticationException + * @param {String} [msg] - An error message that will probably end up in a log. + */ + 401: 'Authentication Exception', + + /** + * BadRequest * @param {String} [msg] - An error message that will probably end up in a log. */ 400: 'Bad Request', /** - * Moved Permanently + * MovedPermanently * @param {String} [msg] - An error message that will probably end up in a log. */ 301: 'Moved Permanently' @@ -138,4 +144,4 @@ _.each(statusCodes, function (name, status) { _.inherits(StatusCodeError, ErrorAbstract); errors[className] = StatusCodeError; errors[status] = StatusCodeError; -}); +}); \ No newline at end of file From 3568fcbaefcc5b02a24739feacfa8f42f560211d Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Tue, 6 Jan 2015 07:38:03 -0700 Subject: [PATCH 04/11] updated ssl/https docs --- docs/api_conventions.asciidoc | 1 + docs/host.asciidoc | 21 ++++++++------ docs/index.asciidoc | 2 ++ docs/ssl_authentication.asciidoc | 49 ++++++++++++++++++++++++++++++++ 4 files changed, 64 insertions(+), 9 deletions(-) create mode 100644 docs/ssl_authentication.asciidoc diff --git a/docs/api_conventions.asciidoc b/docs/api_conventions.asciidoc index 6afee9f4c..842e6a14b 100755 --- a/docs/api_conventions.asciidoc +++ b/docs/api_conventions.asciidoc @@ -21,6 +21,7 @@ NOTE: the https://github.com/fullscale/elastic.js[elastic.js] library can be use * `requestTimeout` -- <> * `maxRetries` -- <> +[[api-conventions-cb]] === Callbacks or Promises When a callback is passed to any of the API methods, it will be called with `(err, response, status)`. If you prefer to use promises, don't pass a callback and a promise will be returned. The promise will either be resolved with the response body, or rejected with the error that occured (including any 300+ response for non "exists" methods). 
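As a quick illustration of the two calling styles described in the paragraph above, here is a minimal sketch; the host, index name, and query string are placeholder values and are not part of this patch:

[source,js]
-----
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client({ host: 'localhost:9200' });

// callback style: the callback is invoked with (err, response, status)
client.search({ index: 'myindex', q: 'user:kimchy' }, function (err, response, status) {
  if (err) return console.error(err);
  console.log(response.hits.total);
});

// promise style: omit the callback and a promise is returned,
// resolved with the response body or rejected with the error
client.search({ index: 'myindex', q: 'user:kimchy' })
  .then(function (response) {
    console.log(response.hits.total);
  })
  .catch(function (err) {
    console.error(err);
  });
-----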
diff --git a/docs/host.asciidoc b/docs/host.asciidoc index 1fdc5d1c4..622476e77 100644 --- a/docs/host.asciidoc +++ b/docs/host.asciidoc @@ -44,42 +44,45 @@ Options ::: `...`:: `Any` -- When the host receives a configuration object, it assigns all of the object's keys to itself. This allows you to pass in arbitrary keys and values that can be used within selectors, or other custom functionality. -`pfx`:: +`ssl`:: +`Object` -- configuration options pertaining to HTTPS/SSL. For more information visit the <> Section. + +`ssl.pfx`:: `String,Array[String]` -- Certificate, Private key and CA certificates to use for SSL. Default ::: `null` -`key`:: +`ssl.key`:: `String` -- Private key to use for SSL. Default ::: `null` -`passphrase`:: +`ssl.passphrase`:: `String` -- A string of passphrase for the private key or pfx. Default ::: `null` -`cert`:: +`ssl.cert`:: `String` -- Public x509 certificate to use. Default ::: `null` -`ca`:: +`ssl.ca`:: `String,Array[String]` -- An authority certificate or array of authority certificates to check the remote host against. Default ::: `null` -`ciphers`:: +`ssl.ciphers`:: `String` -- A string describing the ciphers to use or exclude. Consult http://www.openssl.org/docs/apps/ciphers.html#CIPHER_LIST_FORMAT for details on the format. Default ::: `null` -`rejectUnauthorized`:: +`ssl.rejectUnauthorized`:: `Boolean` -- If true, the server certificate is verified against the list of supplied CAs. An 'error' event is emitted if verification fails. Verification happens at the connection level, before the HTTP request is sent. -Default ::: `true` +Default ::: `false` -`secureProtocol`:: +`ssl.secureProtocol`:: `String` -- The SSL method to use, e.g. TLSv1_method to force TLS version 1. The possible values depend on your installation of OpenSSL and are defined in the constant SSL_METHODS. Default ::: `null` diff --git a/docs/index.asciidoc b/docs/index.asciidoc index f8b5e4667..95469ef29 100644 --- a/docs/index.asciidoc +++ b/docs/index.asciidoc @@ -10,6 +10,8 @@ include::api_conventions.asciidoc[] include::configuration.asciidoc[] +include::ssl_authentication.asciidoc[] + include::extending_core_components.asciidoc[] include::logging.asciidoc[] diff --git a/docs/ssl_authentication.asciidoc b/docs/ssl_authentication.asciidoc new file mode 100644 index 000000000..b4fef1d26 --- /dev/null +++ b/docs/ssl_authentication.asciidoc @@ -0,0 +1,49 @@ +[[auth-reference]] +== SSL and Authentication + +You can configure the client to use SSL for connecting to your elasticsearch cluster, including certificate verification and http auth. + +=== Basic Auth + +Basic authentication credentials can be configured on a per-host basis using URL notiation, or at the `auth:` property of a host config object. + +.Credentials directly in the host url: +[source,js] +----- +var client = new elasticsearch.Client({ + host: 'https://user:password@my-site.com:9200' +}) +----- + +.Credentials as a property of the host config: +[source,js] +----- +var client = new elasticsearch.Client({ + host: [ + { + host: 'es1.internal.org', + auth: 'user:password' + } + ] +}); +----- + +=== HTTPS/SSL + +Without any additional configuration you can specify `https://` host urls, but the certificates used to sign these requests will not verified (`rejectUnauthorized: false`). To turn on certificate verification you must specify an `ssl:` object either in the top level config or in each host config object and set `rejectUnauthorized: true`. 
The ssl config object can contain many of the same configuration options that http://nodejs.org/docs/latest/api/tls.html#tls_tls_connect_port_host_options_callback[`tls.connect()`] accepts. For convenience these options are also listed in the <> reference. + +.Specify a certificate authority that should be used to verify server certifcates on all nodes: +[source,js] +----- +var client = new elasticsearch.Client({ + hosts: [ + 'https://box1.internal.org', + 'https://box2.internal.org', + 'https://box3.internal.org' + ], + ssl: { + ca: fs.readFileSync('./cacert.pem'), + rejectUnauthorized: true + } +}); +----- \ No newline at end of file From 6b8b9fac982aa11eab7a2d9994a8fe96b3dbcb67 Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Tue, 6 Jan 2015 07:38:03 -0700 Subject: [PATCH 05/11] updated ssl/https docs --- docs/api_conventions.asciidoc | 1 + docs/host.asciidoc | 21 ++++++++------ docs/index.asciidoc | 2 ++ docs/ssl_authentication.asciidoc | 49 ++++++++++++++++++++++++++++++++ src/lib/errors.js | 2 +- 5 files changed, 65 insertions(+), 10 deletions(-) create mode 100644 docs/ssl_authentication.asciidoc diff --git a/docs/api_conventions.asciidoc b/docs/api_conventions.asciidoc index 6afee9f4c..842e6a14b 100755 --- a/docs/api_conventions.asciidoc +++ b/docs/api_conventions.asciidoc @@ -21,6 +21,7 @@ NOTE: the https://github.com/fullscale/elastic.js[elastic.js] library can be use * `requestTimeout` -- <> * `maxRetries` -- <> +[[api-conventions-cb]] === Callbacks or Promises When a callback is passed to any of the API methods, it will be called with `(err, response, status)`. If you prefer to use promises, don't pass a callback and a promise will be returned. The promise will either be resolved with the response body, or rejected with the error that occured (including any 300+ response for non "exists" methods). diff --git a/docs/host.asciidoc b/docs/host.asciidoc index 1fdc5d1c4..622476e77 100644 --- a/docs/host.asciidoc +++ b/docs/host.asciidoc @@ -44,42 +44,45 @@ Options ::: `...`:: `Any` -- When the host receives a configuration object, it assigns all of the object's keys to itself. This allows you to pass in arbitrary keys and values that can be used within selectors, or other custom functionality. -`pfx`:: +`ssl`:: +`Object` -- configuration options pertaining to HTTPS/SSL. For more information visit the <> Section. + +`ssl.pfx`:: `String,Array[String]` -- Certificate, Private key and CA certificates to use for SSL. Default ::: `null` -`key`:: +`ssl.key`:: `String` -- Private key to use for SSL. Default ::: `null` -`passphrase`:: +`ssl.passphrase`:: `String` -- A string of passphrase for the private key or pfx. Default ::: `null` -`cert`:: +`ssl.cert`:: `String` -- Public x509 certificate to use. Default ::: `null` -`ca`:: +`ssl.ca`:: `String,Array[String]` -- An authority certificate or array of authority certificates to check the remote host against. Default ::: `null` -`ciphers`:: +`ssl.ciphers`:: `String` -- A string describing the ciphers to use or exclude. Consult http://www.openssl.org/docs/apps/ciphers.html#CIPHER_LIST_FORMAT for details on the format. Default ::: `null` -`rejectUnauthorized`:: +`ssl.rejectUnauthorized`:: `Boolean` -- If true, the server certificate is verified against the list of supplied CAs. An 'error' event is emitted if verification fails. Verification happens at the connection level, before the HTTP request is sent. -Default ::: `true` +Default ::: `false` -`secureProtocol`:: +`ssl.secureProtocol`:: `String` -- The SSL method to use, e.g. 
TLSv1_method to force TLS version 1. The possible values depend on your installation of OpenSSL and are defined in the constant SSL_METHODS. Default ::: `null` diff --git a/docs/index.asciidoc b/docs/index.asciidoc index f8b5e4667..95469ef29 100644 --- a/docs/index.asciidoc +++ b/docs/index.asciidoc @@ -10,6 +10,8 @@ include::api_conventions.asciidoc[] include::configuration.asciidoc[] +include::ssl_authentication.asciidoc[] + include::extending_core_components.asciidoc[] include::logging.asciidoc[] diff --git a/docs/ssl_authentication.asciidoc b/docs/ssl_authentication.asciidoc new file mode 100644 index 000000000..b4fef1d26 --- /dev/null +++ b/docs/ssl_authentication.asciidoc @@ -0,0 +1,49 @@ +[[auth-reference]] +== SSL and Authentication + +You can configure the client to use SSL for connecting to your elasticsearch cluster, including certificate verification and http auth. + +=== Basic Auth + +Basic authentication credentials can be configured on a per-host basis using URL notiation, or at the `auth:` property of a host config object. + +.Credentials directly in the host url: +[source,js] +----- +var client = new elasticsearch.Client({ + host: 'https://user:password@my-site.com:9200' +}) +----- + +.Credentials as a property of the host config: +[source,js] +----- +var client = new elasticsearch.Client({ + host: [ + { + host: 'es1.internal.org', + auth: 'user:password' + } + ] +}); +----- + +=== HTTPS/SSL + +Without any additional configuration you can specify `https://` host urls, but the certificates used to sign these requests will not verified (`rejectUnauthorized: false`). To turn on certificate verification you must specify an `ssl:` object either in the top level config or in each host config object and set `rejectUnauthorized: true`. The ssl config object can contain many of the same configuration options that http://nodejs.org/docs/latest/api/tls.html#tls_tls_connect_port_host_options_callback[`tls.connect()`] accepts. For convenience these options are also listed in the <> reference. 
+ +.Specify a certificate authority that should be used to verify server certifcates on all nodes: +[source,js] +----- +var client = new elasticsearch.Client({ + hosts: [ + 'https://box1.internal.org', + 'https://box2.internal.org', + 'https://box3.internal.org' + ], + ssl: { + ca: fs.readFileSync('./cacert.pem'), + rejectUnauthorized: true + } +}); +----- \ No newline at end of file diff --git a/src/lib/errors.js b/src/lib/errors.js index dfc99c0f1..685ff1bf2 100644 --- a/src/lib/errors.js +++ b/src/lib/errors.js @@ -138,7 +138,7 @@ _.each(statusCodes, function (name, status) { var className = _.studlyCase(name); function StatusCodeError(msg) { - ErrorAbstract.call(this, msg || name, errors[className]); + ErrorAbstract.call(this, msg || name, StatusCodeError); } _.inherits(StatusCodeError, ErrorAbstract); From ce7cf87c663cce9ea58609f4ba826aa4fff69489 Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Tue, 6 Jan 2015 08:24:41 -0700 Subject: [PATCH 06/11] version 3.1.0 --- CHANGELOG.md | 3 +++ README.md | 8 ++++---- docs/browser_builds.asciidoc | 2 +- package.json | 2 +- 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index be57bfd64..f54be884f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # elasticsearch-js changelog +## 3.1 (Jan 6 2015) + - Added HTTPS/SSL configuration options and related errors + ## 3.0 (Nov 7 2014) - Added apiVersion `"1.4"`, which is now the default - Improved parsing of `host:` strings, [examples in the tests](https://github.com/elasticsearch/elasticsearch-js/blob/165b7d7986b2184b2e4b73d33bf5803e61ce7a54/test/unit/specs/host.js#L71-L92) diff --git a/README.md b/README.md index cd514dfa2..43f885a21 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ The official low-level Elasticsearch client for Node.js and the browser. 
-[![Build Status](http://img.shields.io/travis/elasticsearch/elasticsearch-js/2.4.svg?style=flat-square)](https://travis-ci.org/elasticsearch/elasticsearch-js?branch=3.0) -[![Coverage Status](http://img.shields.io/coveralls/elasticsearch/elasticsearch-js/2.4.svg?style=flat-square)](https://coveralls.io/r/elasticsearch/elasticsearch-js?branch=3.0) +[![Build Status](http://img.shields.io/travis/elasticsearch/elasticsearch-js/2.4.svg?style=flat-square)](https://travis-ci.org/elasticsearch/elasticsearch-js?branch=3.1) +[![Coverage Status](http://img.shields.io/coveralls/elasticsearch/elasticsearch-js/2.4.svg?style=flat-square)](https://coveralls.io/r/elasticsearch/elasticsearch-js?branch=3.1) [![Dependencies up to date](http://img.shields.io/david/elasticsearch/elasticsearch-js.svg?style=flat-square)](https://david-dm.org/elasticsearch/elasticsearch-js) ## Features @@ -34,10 +34,10 @@ Check out the [Browser Builds](http://www.elasticsearch.org/guide/en/elasticsear download: -zip +zip -tar.gz +tar.gz diff --git a/docs/browser_builds.asciidoc b/docs/browser_builds.asciidoc index ac7b76934..b76ff983f 100644 --- a/docs/browser_builds.asciidoc +++ b/docs/browser_builds.asciidoc @@ -14,7 +14,7 @@ bower install elasticsearch --------- === Download - * v3.0.2: https://download.elasticsearch.org/elasticsearch/elasticsearch-js/elasticsearch-js-3.0.2.zip[zip], https://download.elasticsearch.org/elasticsearch/elasticsearch-js/elasticsearch-js-3.0.2.tar.gz[tar.gz] + * v3.1.0: https://download.elasticsearch.org/elasticsearch/elasticsearch-js/elasticsearch-js-3.1.0.zip[zip], https://download.elasticsearch.org/elasticsearch/elasticsearch-js/elasticsearch-js-3.1.0.tar.gz[tar.gz] === Angular Build diff --git a/package.json b/package.json index b2c0a4f64..0207ea24c 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,7 @@ "description": "The official low-level Elasticsearch client for Node.js and the browser.", "main": "src/elasticsearch.js", "homepage": "http://www.elasticsearch.org/guide/en/elasticsearch/client/javascript-api/current/index.html", - "version": "3.0.2", + "version": "3.1.0", "browser": { "./src/lib/connectors/index.js": "./src/lib/connectors/browser_index.js", "./src/lib/loggers/index.js": "./src/lib/loggers/browser_index.js", From 2052921d546a35a896f521e25a2461bfe77d391b Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Tue, 6 Jan 2015 11:08:02 -0700 Subject: [PATCH 07/11] moved ssl options to the config page --- docs/configuration.asciidoc | 30 ++++++++++++++++++++++- docs/host.asciidoc | 42 +------------------------------- docs/ssl_authentication.asciidoc | 2 +- 3 files changed, 31 insertions(+), 43 deletions(-) diff --git a/docs/configuration.asciidoc b/docs/configuration.asciidoc index f62a99d8d..e72969156 100644 --- a/docs/configuration.asciidoc +++ b/docs/configuration.asciidoc @@ -30,7 +30,6 @@ Default::: - `log`[[config-log]]:: `String, String[], Object, Object[], Constructor` -- Unless a constructor is specified, this sets the output settings for the bundled logger. See the section on configuring-logging[logging] for more information. Default in Node::: @@ -147,6 +146,35 @@ Defaults::: +`ssl`[[config-ssl]]:: `Object` -- An object defining HTTPS/SSL configuration to use for all nodes. The properties of this mimic the options accepted by http://nodejs.org/docs/latest/api/tls.html#tls_tls_connect_port_host_options_callback[`tls.connect()`] with the exception of `rejectUnauthorized`, which defaults to `false` allowing self-signed certificates to work out-of-the-box. 
++ +Additional information available in <>. + +`ssl.pfx`::: `String,Array[String]` -- Certificate, Private key and CA certificates to use for SSL. Default `null`. +`ssl.key`::: `String` -- Private key to use for SSL. Default `null`. +`ssl.passphrase`::: `String` -- A string of passphrase for the private key or pfx. Default `null`. +`ssl.cert`::: `String` -- Public x509 certificate to use. Default `null`. +`ssl.ca`::: `String,Array[String]` -- An authority certificate or array of authority certificates to check the remote host against. Default `null`. +`ssl.ciphers`::: `String` -- A string describing the ciphers to use or exclude. Consult http://www.openssl.org/docs/apps/ciphers.html#CIPHER_LIST_FORMAT for details on the format. Default `null`. +`ssl.rejectUnauthorized`::: `Boolean` -- If true, the server certificate is verified against the list of supplied CAs. An 'error' event is emitted if verification fails. Verification happens at the connection level, before the HTTP request is sent. Default `false` +`ssl.secureProtocol`::: `String` -- The SSL method to use, e.g. TLSv1_method to force TLS version 1. The possible values depend on your installation of OpenSSL and are defined in the constant SSL_METHODS. Default `null`. +Example::: ++ +[source,js] +----- +var client = new elasticsearch.Client({ + hosts: [ + 'https://box1.internal.org', + 'https://box2.internal.org', + 'https://box3.internal.org' + ], + ssl: { + ca: fs.readFileSync('./cacert.pem'), + rejectUnauthorized: true + } +}); +----- + `selector`[[config-selector]]:: `String, Function` -- This function will be used to select a connection from the ConnectionPool. It should received a single argument, the list of "active" connections, and return the connection to use. Use this selector to implement special logic for your client such as preferring nodes in a certain rack or data-center. + diff --git a/docs/host.asciidoc b/docs/host.asciidoc index 622476e77..bbe5f5f27 100644 --- a/docs/host.asciidoc +++ b/docs/host.asciidoc @@ -45,47 +45,7 @@ Options ::: `Any` -- When the host receives a configuration object, it assigns all of the object's keys to itself. This allows you to pass in arbitrary keys and values that can be used within selectors, or other custom functionality. `ssl`:: -`Object` -- configuration options pertaining to HTTPS/SSL. For more information visit the <> Section. - -`ssl.pfx`:: -`String,Array[String]` -- Certificate, Private key and CA certificates to use for SSL. - -Default ::: `null` - -`ssl.key`:: -`String` -- Private key to use for SSL. - -Default ::: `null` - -`ssl.passphrase`:: -`String` -- A string of passphrase for the private key or pfx. - -Default ::: `null` - -`ssl.cert`:: -`String` -- Public x509 certificate to use. - -Default ::: `null` - -`ssl.ca`:: -`String,Array[String]` -- An authority certificate or array of authority certificates to check the remote host against. - -Default ::: `null` - -`ssl.ciphers`:: -`String` -- A string describing the ciphers to use or exclude. Consult http://www.openssl.org/docs/apps/ciphers.html#CIPHER_LIST_FORMAT for details on the format. - -Default ::: `null` - -`ssl.rejectUnauthorized`:: -`Boolean` -- If true, the server certificate is verified against the list of supplied CAs. An 'error' event is emitted if verification fails. Verification happens at the connection level, before the HTTP request is sent. - -Default ::: `false` - -`ssl.secureProtocol`:: -`String` -- The SSL method to use, e.g. TLSv1_method to force TLS version 1. 
The possible values depend on your installation of OpenSSL and are defined in the constant SSL_METHODS. - -Default ::: `null` +`Object` -- Host specific version of the <>. === Examples diff --git a/docs/ssl_authentication.asciidoc b/docs/ssl_authentication.asciidoc index b4fef1d26..979bde0ae 100644 --- a/docs/ssl_authentication.asciidoc +++ b/docs/ssl_authentication.asciidoc @@ -30,7 +30,7 @@ var client = new elasticsearch.Client({ === HTTPS/SSL -Without any additional configuration you can specify `https://` host urls, but the certificates used to sign these requests will not verified (`rejectUnauthorized: false`). To turn on certificate verification you must specify an `ssl:` object either in the top level config or in each host config object and set `rejectUnauthorized: true`. The ssl config object can contain many of the same configuration options that http://nodejs.org/docs/latest/api/tls.html#tls_tls_connect_port_host_options_callback[`tls.connect()`] accepts. For convenience these options are also listed in the <> reference. +Without any additional configuration you can specify `https://` host urls, but the certificates used to sign these requests will not verified (`rejectUnauthorized: false`). To turn on certificate verification you must specify an `ssl:` object either in the top level config or in each host config object and set `rejectUnauthorized: true`. The ssl config object can contain many of the same configuration options that http://nodejs.org/docs/latest/api/tls.html#tls_tls_connect_port_host_options_callback[`tls.connect()`] accepts. For convenience these options are also listed in the <> reference. .Specify a certificate authority that should be used to verify server certifcates on all nodes: [source,js] From 95c16f3173aa3b6099dfa777dc75d86156eb5c99 Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Tue, 6 Jan 2015 20:19:53 -0700 Subject: [PATCH 08/11] include paramAsBody in the override file to specify params that may be bodies --- scripts/generate/js_api.js | 25 +++++++++++++------ scripts/generate/{aliases.js => overrides.js} | 6 +++++ 2 files changed, 24 insertions(+), 7 deletions(-) rename scripts/generate/{aliases.js => overrides.js} (98%) diff --git a/scripts/generate/js_api.js b/scripts/generate/js_api.js index 883a7d733..3700075c4 100644 --- a/scripts/generate/js_api.js +++ b/scripts/generate/js_api.js @@ -39,11 +39,17 @@ module.exports = function (branch, done) { }()); var esDir = fromRoot('src/_elasticsearch_' + _.snakeCase(branch)); - var aliases = _.transform(require('./aliases'), function (aliases, rule) { - if (semver.satisfies(branchAsVersion, rule.version)) { - _.assign(aliases, rule.aliases); - } - }, {}); + + var overrides = require('./overrides') + .filter(function (rule) { + return semver.satisfies(branchAsVersion, rule.version); + }) + .reduce(function (overrides, rule) { + return _.merge(overrides, _.omit(rule, 'version')); + }, { + aliases: {}, + paramAsBody: {} + }); var steps = [ readSpecFiles, @@ -217,7 +223,7 @@ module.exports = function (branch, done) { spec.requestTimeout = 100; } - var urls = _.difference(def.url.paths, aliases[name]); + var urls = _.difference(def.url.paths, overrides.aliases[name]); var urlSignatures = []; urls = _.map(urls, function (url) { var optionalVars = {}; @@ -271,6 +277,10 @@ module.exports = function (branch, done) { ]); }, {}); + if (overrides.paramAsBody[name]) { + spec.paramAsBody = overrides.paramAsBody[name]; + } + if (_.size(spec.params) === 0) { delete spec.params; } @@ -287,7 +297,8 @@ module.exports = 
function (branch, done) { 'urls', 'needBody', 'requestTimeout', - 'bulkBody' + 'bulkBody', + 'paramAsBody' ]), location: location, docUrl: def.documentation, diff --git a/scripts/generate/aliases.js b/scripts/generate/overrides.js similarity index 98% rename from scripts/generate/aliases.js rename to scripts/generate/overrides.js index 6321fc4b5..73e79461e 100644 --- a/scripts/generate/aliases.js +++ b/scripts/generate/overrides.js @@ -1,4 +1,10 @@ module.exports = [ + { + version: '*', + paramAsBody: { + scroll: 'scrollId' + } + }, { version: '0.90.x', aliases: { From ebb1ad37e6e7eee349f0981fd17073bbc2e285f2 Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Tue, 6 Jan 2015 20:21:21 -0700 Subject: [PATCH 09/11] regen api files --- src/lib/apis/0_90.js | 1 + src/lib/apis/1_0.js | 1 + src/lib/apis/1_1.js | 1 + src/lib/apis/1_2.js | 1 + src/lib/apis/1_3.js | 1 + src/lib/apis/1_4.js | 1 + src/lib/apis/1_x.js | 1 + src/lib/apis/master.js | 1 + 8 files changed, 8 insertions(+) diff --git a/src/lib/apis/0_90.js b/src/lib/apis/0_90.js index 991410710..fb3a41e73 100644 --- a/src/lib/apis/0_90.js +++ b/src/lib/apis/0_90.js @@ -2902,6 +2902,7 @@ api.scroll = ca({ fmt: '/_search/scroll' } ], + paramAsBody: 'scrollId', method: 'POST' }); diff --git a/src/lib/apis/1_0.js b/src/lib/apis/1_0.js index b130eb211..2353d726a 100644 --- a/src/lib/apis/1_0.js +++ b/src/lib/apis/1_0.js @@ -4477,6 +4477,7 @@ api.scroll = ca({ fmt: '/_search/scroll' } ], + paramAsBody: 'scrollId', method: 'POST' }); diff --git a/src/lib/apis/1_1.js b/src/lib/apis/1_1.js index f9cc4ed23..8b77ca8eb 100644 --- a/src/lib/apis/1_1.js +++ b/src/lib/apis/1_1.js @@ -4574,6 +4574,7 @@ api.scroll = ca({ fmt: '/_search/scroll' } ], + paramAsBody: 'scrollId', method: 'POST' }); diff --git a/src/lib/apis/1_2.js b/src/lib/apis/1_2.js index 4d65b1f9d..eb71429e7 100644 --- a/src/lib/apis/1_2.js +++ b/src/lib/apis/1_2.js @@ -4640,6 +4640,7 @@ api.scroll = ca({ fmt: '/_search/scroll' } ], + paramAsBody: 'scrollId', method: 'POST' }); diff --git a/src/lib/apis/1_3.js b/src/lib/apis/1_3.js index b0ca936a7..88b4b60c5 100644 --- a/src/lib/apis/1_3.js +++ b/src/lib/apis/1_3.js @@ -4765,6 +4765,7 @@ api.scroll = ca({ fmt: '/_search/scroll' } ], + paramAsBody: 'scrollId', method: 'POST' }); diff --git a/src/lib/apis/1_4.js b/src/lib/apis/1_4.js index f5a527483..7b1898ce8 100644 --- a/src/lib/apis/1_4.js +++ b/src/lib/apis/1_4.js @@ -5020,6 +5020,7 @@ api.scroll = ca({ fmt: '/_search/scroll' } ], + paramAsBody: 'scrollId', method: 'POST' }); diff --git a/src/lib/apis/1_x.js b/src/lib/apis/1_x.js index 10e254c93..aec910c43 100644 --- a/src/lib/apis/1_x.js +++ b/src/lib/apis/1_x.js @@ -5030,6 +5030,7 @@ api.scroll = ca({ fmt: '/_search/scroll' } ], + paramAsBody: 'scrollId', method: 'POST' }); diff --git a/src/lib/apis/master.js b/src/lib/apis/master.js index c6dd9036c..92c3a9062 100644 --- a/src/lib/apis/master.js +++ b/src/lib/apis/master.js @@ -5072,6 +5072,7 @@ api.scroll = ca({ fmt: '/_search/scroll' } ], + paramAsBody: 'scrollId', method: 'POST' }); From c7bc19b49a315eecc64e0e6740b2e6c0cc0e7eaa Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Tue, 6 Jan 2015 20:23:50 -0700 Subject: [PATCH 10/11] when a body is not specified, allow a param to fill it's place --- src/lib/client_action.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/lib/client_action.js b/src/lib/client_action.js index 79e45590f..89d3c9a46 100644 --- a/src/lib/client_action.js +++ b/src/lib/client_action.js @@ -191,6 +191,11 @@ function exec(transport, spec, params, cb) { 
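// [editor's sketch -- illustrative only, not part of the patch] The hunk below lets exec()
// promote a named parameter into the missing request body whenever a spec carries
// `paramAsBody`. Combined with the `scroll: 'scrollId'` override added earlier in this
// series, the intended call pattern looks roughly like the following; the client setup
// and the scroll id value are assumptions for illustration, not taken from the patch.
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client({ host: 'localhost:9200' });
// placeholder for a _scroll_id returned by an earlier search made with a ?scroll= timeout
var lastScrollId = 'c2Nhbjs2OzM0NDg1ODpzRlBLc0FXNlNyNm5JWUc1';

// No explicit `body` is passed, so exec() copies scrollId into params.body, deletes the
// original param, and the id is POSTed to /_search/scroll as the request body.
client.scroll({ scrollId: lastScrollId, scroll: '30s' }, function (err, resp) {
  // resp.hits.hits carries the next page of results
});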
request.requestTimeout = spec.requestTimeout; } + if (!params.body && spec.paramAsBody) { + params.body = params[spec.paramAsBody]; + delete params[spec.paramAsBody]; + } + // verify that we have the body if needed if (spec.needsBody && !params.body) { throw new TypeError('A request body is required.'); From 92605071280d51aad52cc3d92bb123b7f8783725 Mon Sep 17 00:00:00 2001 From: Spencer Alger Date: Fri, 9 Jan 2015 14:35:53 -0700 Subject: [PATCH 11/11] update master and 1.x apis --- src/lib/apis/1_x.js | 38 ++++++++++++++++++++++++++++++++++++++ src/lib/apis/master.js | 42 ++++++++++++++++++++++++++++++++++++++---- 2 files changed, 76 insertions(+), 4 deletions(-) diff --git a/src/lib/apis/1_x.js b/src/lib/apis/1_x.js index aec910c43..0296f63ee 100644 --- a/src/lib/apis/1_x.js +++ b/src/lib/apis/1_x.js @@ -610,6 +610,44 @@ api.cat.prototype.recovery = ca({ ] }); +/** + * Perform a [cat.segments](http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/cat-segments.html) request + * + * @param {Object} params - An object with parameters used to carry out this action + * @param {String, String[], Boolean} params.h - Comma-separated list of column names to display + * @param {Boolean} params.help - Return help information + * @param {Boolean} [params.v=true] - Verbose mode. Display column headers + * @param {String, String[], Boolean} params.index - A comma-separated list of index names to limit the returned information + */ +api.cat.prototype.segments = ca({ + params: { + h: { + type: 'list' + }, + help: { + type: 'boolean', + 'default': false + }, + v: { + type: 'boolean', + 'default': true + } + }, + urls: [ + { + fmt: '/_cat/segments/<%=index%>', + req: { + index: { + type: 'list' + } + } + }, + { + fmt: '/_cat/segments' + } + ] +}); + /** * Perform a [cat.shards](http://www.elasticsearch.org/guide/en/elasticsearch/reference/1.x/cat-shards.html) request * diff --git a/src/lib/apis/master.js b/src/lib/apis/master.js index 92c3a9062..713a1efdd 100644 --- a/src/lib/apis/master.js +++ b/src/lib/apis/master.js @@ -669,6 +669,44 @@ api.cat.prototype.recovery = ca({ ] }); +/** + * Perform a [cat.segments](http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/cat-segments.html) request + * + * @param {Object} params - An object with parameters used to carry out this action + * @param {String, String[], Boolean} params.h - Comma-separated list of column names to display + * @param {Boolean} params.help - Return help information + * @param {Boolean} [params.v=true] - Verbose mode. 
Display column headers + * @param {String, String[], Boolean} params.index - A comma-separated list of index names to limit the returned information + */ +api.cat.prototype.segments = ca({ + params: { + h: { + type: 'list' + }, + help: { + type: 'boolean', + 'default': false + }, + v: { + type: 'boolean', + 'default': true + } + }, + urls: [ + { + fmt: '/_cat/segments/<%=index%>', + req: { + index: { + type: 'list' + } + } + }, + { + fmt: '/_cat/segments' + } + ] +}); + /** * Perform a [cat.shards](http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/cat-shards.html) request * @@ -5991,7 +6029,6 @@ api.termvectors = ca({ * @param {String} params.consistency - Explicit write consistency setting for the operation * @param {String, String[], Boolean} params.fields - A comma-separated list of fields to return in the response * @param {String} params.lang - The script language (default: groovy) - * @param {String} params.parent - ID of the parent document * @param {Boolean} params.refresh - Refresh the index after performing the operation * @param {String} [params.replication=sync] - Specific replication type * @param {Number} params.retryOnConflict - Specify how many times should the operation be retried when a conflict occurs (default: 0) @@ -6024,9 +6061,6 @@ api.update = ca({ lang: { type: 'string' }, - parent: { - type: 'string' - }, refresh: { type: 'boolean' },
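// [editor's sketch -- illustrative only, not part of the patch] The regenerated master/1.x
// specs above add a cat.segments action. A minimal usage sketch under stated assumptions:
// the client build actually exposes the '1.x' API file via `apiVersion`, and the host and
// index name are placeholders chosen for illustration.
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client({ host: 'localhost:9200', apiVersion: '1.x' });

// Maps to GET /_cat/segments/logs-2015.01; `v` defaults to true per the spec above, so the
// plain-text table comes back with column headers.
client.cat.segments({ index: 'logs-2015.01', h: ['index', 'segment', 'size'] }, function (err, table) {
  if (err) throw err;
  console.log(table);
});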