docs/api_methods_1_x.asciidoc (new file, 4047 lines)
File diff suppressed because it is too large
@@ -21,6 +21,7 @@
   },
   "supported_es_branches": [
     "master",
+    "1.x",
     "1.1",
     "1.0",
     "0.90"
@@ -1,12 +1,12 @@
-module.exports = spawn;
+module.exports = _spawn;
 
 var estream = require('event-stream');
 var chalk = require('chalk');
-var cp = require('child_process');
+var spawn = require('child_process').spawn;
 var path = require('path');
 var root = path.resolve(__dirname, '../');
 
-function spawn(cmd, args, opts, cb) {
+function _spawn(cmd, args, opts, cb) {
   opts = opts || {};
   var conf = {
     stdio: 'pipe'
@@ -20,26 +20,26 @@ function spawn(cmd, args, opts, cb) {
   }
   console.log(chalk.white.bold((subdir ? subdir + ' ' : '') + '$ ') + cmd + ' ' + args.join(' '));
 
-  var proc = cp.spawn(cmd, args, opts);
-  var out = estream.split();
+  var cp = spawn(cmd, args, opts);
+  var split = estream.split();
 
   if (opts.verbose) {
-    proc.stdout.pipe(out);
+    cp.stdout.pipe(split);
   } else {
-    proc.stdout.resume();
+    cp.stdout.resume();
   }
 
-  proc.stderr.pipe(out);
+  cp.stderr.pipe(split);
 
-  out
+  split
     .pipe(estream.mapSync(function indent(line) {
       return line ? ' ' + line + '\n' : '';
     }))
     .pipe(process.stdout);
 
   if (typeof cb === 'function') {
-    proc.on('exit', cb);
+    cp.on('exit', cb);
   }
 
-  return proc;
+  return cp;
 }
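The helper above keeps the same call shape after the rename; a minimal usage sketch, assuming the module sits next to the calling script and exports the function shown (the require path and the command run are illustrative, not part of the diff):

var spawn = require('./_spawn');

// run a command, echo its output indented, and forward the child's exit code
spawn('npm', ['test'], { verbose: true }, function (code) {
  process.exit(code);
});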
@@ -48,11 +48,7 @@ if [[ "$NODE_INTEGRATION" != "0" ]]; then
   group "running integration tests"
   if [[ -n "$JENKINS" ]]; then
     # convert TESTING_BRANCH into BRANCH_SUFFIX
-    if [[ $TESTING_BRANCH = 'master' ]]; then
-      BRANCH_SUFFIX=''
-    else
-      BRANCH_SUFFIX="_${TESTING_BRANCH//./_}"
-    fi
+    BRANCH_SUFFIX="_${TESTING_BRANCH//./_}"
 
     # find value of ES_PORT
     if [[ -n "$es_port" ]]; then
@@ -13,18 +13,20 @@ fi
 re_nightly='^(.*)_nightly$';
 re_090='^0\.90\..*$';
 re_10='^1\.0\..*$';
+re_1x='^1\.([0-9]+)\..*$';
 if [[ "$ES_V" =~ $re_nightly ]]; then
   export ES_BRANCH=${BASH_REMATCH[1]}
 elif [[ "$ES_V" =~ $re_090 ]]; then
   export ES_BRANCH='0.90'
   export ES_RELEASE=$ES_V
-elif [[ "$ES_V" =~ $re_10 ]]; then
-  export ES_BRANCH='1.0'
+elif [[ "$ES_V" =~ $re_1x ]]; then
+  export ES_BRANCH="1.${BASH_REMATCH[1]}"
+  export ES_RELEASE=$ES_V
 else
   echo "unable to parse ES_V $ES_V"
   exit 1
 fi
 
 echo "ES_BRANCH = $ES_BRANCH , ES_RELEASE = $ES_RELEASE"
 
 source $HERE/ci.sh
src/lib/apis/1_x.js (new file, 5287 lines)
File diff suppressed because it is too large
@@ -1,6 +1,5 @@
 module.exports = {
   '1.1': require('./1_1'),
   '1.0': require('./1_0'),
-  '0.90': require('./0_90'),
-  _default: '1.1'
+  '0.90': require('./0_90')
 };
@@ -1,7 +1,7 @@
 module.exports = {
   'master': require('./master'),
+  '1.x': require('./1_x'),
   '1.1': require('./1_1'),
   '1.0': require('./1_0'),
-  '0.90': require('./0_90'),
-  _default: '1.1'
+  '0.90': require('./0_90')
 };
@@ -3419,7 +3419,8 @@ api.indices.prototype.stats = ca({
       'search',
       'segments',
       'store',
-      'warmer'
+      'warmer',
+      'suggest'
     ]
   }
 }
@@ -3445,7 +3446,8 @@ api.indices.prototype.stats = ca({
       'search',
       'segments',
       'store',
-      'warmer'
+      'warmer',
+      'suggest'
     ]
   }
 }
@@ -4276,7 +4278,8 @@ api.nodes.prototype.stats = ca({
       'search',
       'segments',
       'store',
-      'warmer'
+      'warmer',
+      'suggest'
     ]
   }
 }
@@ -4342,7 +4345,8 @@ api.nodes.prototype.stats = ca({
       'search',
       'segments',
       'store',
-      'warmer'
+      'warmer',
+      'suggest'
     ]
   }
 }
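The four hunks above add 'suggest' to the accepted metric values of the indices and nodes stats APIs. A hedged sketch of a call that requests it; the host, index name, and response handling are assumptions for illustration, not part of the diff:

var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client({ host: 'localhost:9200' });

// ask only for the suggest and store sections of the index stats
client.indices.stats({
  index: 'logs',
  metric: ['suggest', 'store']
}, function (err, resp) {
  if (err) throw err;
  console.log(resp);
});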
@@ -63,7 +63,7 @@ function Client(config) {
     delete this._namespaces;
   }
 
-  EsApiClient.prototype = _.funcEnum(config, 'apiVersion', Client.apis, Client.apis._default);
+  EsApiClient.prototype = _.funcEnum(config, 'apiVersion', Client.apis, '1.1');
   if (!config.sniffEndpoint && EsApiClient.prototype === Client.apis['0.90']) {
     config.sniffEndpoint = '/_cluster/nodes';
   }
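With '1.x' now registered in Client.apis and the fallback hard-coded to '1.1', version selection happens through the apiVersion config option. A minimal sketch, assuming the published 'elasticsearch' package and a local node; only the apiVersion value comes from the diff:

var elasticsearch = require('elasticsearch');

var client = new elasticsearch.Client({
  host: 'localhost:9200',
  apiVersion: '1.x' // when omitted, the client now falls back to the '1.1' API
});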
test/fixtures/keepalive.js (38 lines changed, vendored)
@@ -3,33 +3,24 @@ var Client = require('../../src/elasticsearch').Client;
 var _ = require('lodash-node');
 var times = require('async').times;
 
-var app = require('express')();
-app.post('/_search', function (req, res) {
-  res.json(200, { hits: { hits: [] } });
-});
-
-var server = require('http').createServer(app);
-server.listen(function () {
+process.once('message', function (port) {
   var es = new Client({
-    host: 'http://127.0.0.1:' + server.address().port,
+    host: 'http://127.0.0.1:' + port,
     log: false
   });
 
-  var matchAll = {
-    query: {
-      match_all: {}
-    }
-  };
-
-  times(1000, function (i, done) {
+  times(1000, function (n, done) {
     es.search({
-      body: matchAll
-    }, _.partial(done, null)); // ignore errors
+      body: {
+        query: {
+          match_all: {}
+        }
+      }
+    }, done);
     clock.tick(10);
-  }, function () {
-
-    var sockets = _(es.transport.connectionPool._conns.dead)
-      .concat(es.transport.connectionPool._conns.alive)
+  }, function (err) {
+    var conns = es.transport.connectionPool._conns;
+    var sockets = _([].concat(conns.dead, conns.alive))
       .transform(function (sockets, conn) {
         [].push.apply(sockets, _.values(conn.agent.sockets));
         [].push.apply(sockets, _.values(conn.agent.freeSockets));
@@ -37,13 +28,12 @@ server.listen(function () {
       .flatten()
       .value();
 
-    server.close();
     es.close();
 
     var out = {
-      socketCount: sockets.length,
+      socketCount: err || sockets.length,
       remaining: _.where(sockets, { destroyed: true }).length - sockets.length,
-      timeouts: _.size(clock.timeouts) && clock.timeouts
+      timeouts: _.size(clock.timeouts) && _.pluck(clock.timeouts, 'func').map(String)
     };
 
     clock.restore();
test/fixtures/keepalive_server.js (new file, 16 lines, vendored)
@@ -0,0 +1,16 @@
+// fake es server for the keepalive test script
+
+// Node 0.9.25 uses timeouts for outgoing messages
+// which prevent sinon from being able to ensure
+// timeouts aren't being left behind
+
+var express = require('express');
+var app = express().post('/_search', function (req, res) {
+  res.json(200, { hits: { hits: [] } });
+});
+
+var server = require('http').createServer(app);
+server.listen(function () {
+  var port = server.address().port;
+  process.connected ? process.send(port) : console.log(port);
+});
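The fixture above reports its port over the child-process channel when forked, or prints it when run directly. A hedged sketch of driving it from a parent process; the path and variable names are illustrative only:

var fork = require('child_process').fork;

var server = fork(__dirname + '/keepalive_server.js');
server.on('message', function (port) {
  // the fixture sent its listening port via process.send(port)
  console.log('fake ES server listening on', port);
});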
@@ -128,28 +128,27 @@ function YamlDoc(doc, file) {
     action.bound = _.bind(method, self, action.args);
 
     // create a function that can be passed to mocha or async
-    action.testable = function (done) {
+    action.testable = function (_cb) {
+      function done(err) {
+        process.nextTick(function () {
+          if (err) {
+            err.message += ' in ' + action.name;
+          }
+          _cb(err);
+        });
+      }
+
       if (self.skipping || self.file.skipping) {
         return done();
       }
       if (method.length > 1) {
-        action.bound(function (err) {
-          if (err) {
-            err.message += ' in ' + action.name;
-          }
-          process.nextTick(function () {
-            done(err);
-          });
-        });
+        action.bound(done);
       } else {
         try {
           action.bound();
           process.nextTick(done);
         } catch (err) {
-          err.message += ' in ' + action.name;
-          process.nextTick(function () {
-            done(err);
-          });
+          done(err);
         }
       }
     };
@@ -371,19 +371,35 @@ describe('Http Connector', function () {
 
   describe('Connection cleanup', function () {
     it('destroys any connections created', function (done) {
-      this.timeout(10000);
+      this.timeout(null);
       var cp = require('child_process');
      var path = require('path');
      var es = require('event-stream');
+      var fixtures = path.join(__dirname, '../../fixtures/');
+      var timeout; // start the timeout once we hear back from the client
 
-      var proc = cp.fork(path.join(__dirname, '../../fixtures/keepalive.js'));
+      var server = cp.fork(fixtures + 'keepalive_server.js');
+      var client = cp.fork(fixtures + 'keepalive.js');
 
-      proc.on('message', function (output) {
-        proc.kill();
+      server.on('message', function (port) {
+        client.send(port);
+      });
+
+      client.on('message', function (output) {
         expect(output).to.have.property('remaining', 0);
         expect(output).to.have.property('timeouts', 0);
+        server.kill('SIGKILL');
+        if (client.connected) {
+          client.disconnect();
+        }
+
+        timeout = setTimeout(function () {
+          client.removeListener('exit');
+          done(new Error('process should have closed by now'));
+        }, 2000);
+      });
+
+      client.on('exit', function () {
+        clearTimeout(timeout);
         done();
       });
     });