Added more unit tests, bringing coverage up to 98%. Cleaned up the Gruntfile so it's no longer a tangled mess.
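For orientation before reading the diff: the reorganized Gruntfile now delegates configuration loading to load-grunt-config (each task's config lives in its own file under grunt/config/) and pulls the task aliases in with grunt.loadTasks('grunt'). A condensed sketch of that layout, taken from the new Gruntfile shown in the diff below but omitting the banner template:

    'use strict';

    module.exports = function (grunt) {
      // load-grunt-config reads every file in grunt/config/ as task configuration
      require('load-grunt-config')(grunt, {
        configPath: require('path').join(__dirname, 'grunt/config'),
        init: true,
        config: {
          distDir: 'dist'
        }
      });

      // load task definitions (build, default, publish, release)
      grunt.loadTasks('grunt');
    };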
1 .gitignore (vendored)
@@ -4,6 +4,7 @@ npm-debug.log
node_modules
scripts/scratch*
test/integration/yaml_suite/log
.aws-config.json

## generated files
scripts/last_rest_spec_update.sha
123 Gruntfile.js
@@ -2,114 +2,23 @@
'use strict';

module.exports = function (grunt) {

  // Project configuration.
  grunt.initConfig({
    distDir: 'dist',
    pkg: grunt.file.readJSON('package.json'),
    meta: {
      banner: '/*! <%= pkg.name %> - v<%= pkg.version %> - ' +
        '<%= grunt.template.today("yyyy-mm-dd") %>\n' +
        '<%= pkg.homepage ? " * " + pkg.homepage + "\\n" : "" %>' +
        ' * Copyright (c) <%= grunt.template.today("yyyy") %> <%= pkg.author.name %>;' +
        ' Licensed <%= pkg.license %> */\n' +
        ' // built using browserify\n\n'
    },
    clean: {
      dist: {
        src: ['<%= distDir %>']
      }
    },
    jshint: {
      source: {
        src: [
          'src/**/*.js',
          'scripts/**/*.js',
          'test/**/*.js -test/browser_integration/yaml_tests.js',
          'Gruntfile.js'
        ],
        options: {
          jshintrc: true
        }
      }
    },
    browserify: {
      client: {
        files: {
          '<%= distDir %>/elasticsearch.js': 'src/elasticsearch.js'
        },
        options: {
          standalone: 'elasticsearch',
          ignore: [
            'src/lib/connectors/jquery.js',
            'src/lib/connectors/angular.js'
          ]
        }
      },
      angular: {
        files: {
          '<%= distDir %>/elasticsearch.angular.js': ['src/elasticsearch.angular.js']
        },
        options: {
          ignore: [
            'src/lib/connectors/jquery.js',
            'src/lib/connectors/xhr.js',
            'when'
          ]
        }
      },
      jquery: {
        files: {
          '<%= distDir %>/elasticsearch.jquery.js': ['src/elasticsearch.jquery.js']
        },
        options: {
          ignore: [
            'src/lib/connectors/angular.js',
            'src/lib/connectors/xhr.js',
            'when'
          ]
        }
      }
    },
    uglify: {
      dist: {
        files: {
          '<%= distDir %>/elasticsearch.min.js': '<%= distDir %>/elasticsearch.js',
          '<%= distDir %>/elasticsearch.angular.min.js': '<%= distDir %>/elasticsearch.angular.js',
          '<%= distDir %>/elasticsearch.jquery.min.js': '<%= distDir %>/elasticsearch.jquery.js'
        }
      }
    },
    concat: {
      dist_banners: {
        files: {
          '<%= distDir %>/elasticsearch.angular.js': '<%= distDir %>/elasticsearch.angular.js',
          '<%= distDir %>/elasticsearch.angular.min.js': '<%= distDir %>/elasticsearch.angular.min.js',
          '<%= distDir %>/elasticsearch.jquery.js': '<%= distDir %>/elasticsearch.jquery.js',
          '<%= distDir %>/elasticsearch.jquery.min.js': '<%= distDir %>/elasticsearch.jquery.min.js',
          '<%= distDir %>/elasticsearch.js': '<%= distDir %>/elasticsearch.js',
          '<%= distDir %>/elasticsearch.min.js': '<%= distDir %>/elasticsearch.min.js'
        },
        options: {
          banner: '<%= meta.banner %>'
        }
  // load plugins
  require('load-grunt-config')(grunt, {
    configPath: require('path').join(__dirname, 'grunt/config'),
    init: true,
    config: {
      distDir: 'dist',
      meta: {
        banner: '/*! <%= package.name %> - v<%= package.version %> - ' +
          '<%= grunt.template.today("yyyy-mm-dd") %>\n' +
          '<%= package.homepage ? " * " + package.homepage + "\\n" : "" %>' +
          ' * Copyright (c) <%= grunt.template.today("yyyy") %> <%= package.author.name %>;' +
          ' Licensed <%= package.license %> */\n' +
          ' // built using browserify\n\n'
      }
    }
  });

  // load plugins
  grunt.loadNpmTasks('grunt-browserify');
  grunt.loadNpmTasks('grunt-contrib-clean');
  grunt.loadNpmTasks('grunt-contrib-concat');
  grunt.loadNpmTasks('grunt-contrib-uglify');
  grunt.loadNpmTasks('grunt-contrib-jshint');

  // Default task runs the build process.
  grunt.registerTask('default', [
    'clean:dist',
    'browserify',
    'uglify:dist',
    'concat:dist_banners'
  ]);

};
  // load task definitions
  grunt.loadTasks('grunt');
};
8 grunt/build.js (new file)
@@ -0,0 +1,8 @@
module.exports = function (grunt) {
  grunt.registerTask('build', [
    'clean:dist',
    'browserify',
    'uglify:dist',
    'concat:dist_banners'
  ]);
};
38 grunt/config/browserify.js (new file)
@@ -0,0 +1,38 @@
module.exports = {
  browser_client: {
    files: {
      '<%= distDir %>/elasticsearch.js': 'src/elasticsearch.js'
    },
    options: {
      standalone: 'elasticsearch',
      ignore: [
        'src/lib/connectors/jquery.js',
        'src/lib/connectors/angular.js'
      ]
    }
  },
  angular_client: {
    files: {
      '<%= distDir %>/elasticsearch.angular.js': ['src/elasticsearch.angular.js']
    },
    options: {
      ignore: [
        'src/lib/connectors/jquery.js',
        'src/lib/connectors/xhr.js',
        'when'
      ]
    }
  },
  jquery_client: {
    files: {
      '<%= distDir %>/elasticsearch.jquery.js': ['src/elasticsearch.jquery.js']
    },
    options: {
      ignore: [
        'src/lib/connectors/angular.js',
        'src/lib/connectors/xhr.js',
        'when'
      ]
    }
  }
};
5 grunt/config/clean.js (new file)
@@ -0,0 +1,5 @@
module.exports = {
  dist: {
    src: ['<%= distDir %>']
  }
};
15 grunt/config/concat.js (new file)
@@ -0,0 +1,15 @@
module.exports = {
  dist_banners: {
    files: {
      '<%= distDir %>/elasticsearch.angular.js': '<%= distDir %>/elasticsearch.angular.js',
      '<%= distDir %>/elasticsearch.angular.min.js': '<%= distDir %>/elasticsearch.angular.min.js',
      '<%= distDir %>/elasticsearch.jquery.js': '<%= distDir %>/elasticsearch.jquery.js',
      '<%= distDir %>/elasticsearch.jquery.min.js': '<%= distDir %>/elasticsearch.jquery.min.js',
      '<%= distDir %>/elasticsearch.js': '<%= distDir %>/elasticsearch.js',
      '<%= distDir %>/elasticsearch.min.js': '<%= distDir %>/elasticsearch.min.js'
    },
    options: {
      banner: '<%= meta.banner %>'
    }
  }
};
13 grunt/config/jshint.js (new file)
@@ -0,0 +1,13 @@
module.exports = {
  source: {
    src: [
      'src/**/*.js',
      'scripts/**/*.js',
      'test/**/*.js -test/browser_integration/yaml_tests.js',
      'Gruntfile.js'
    ],
    options: {
      jshintrc: true
    }
  }
};
29 grunt/config/s3.js (new file)
@@ -0,0 +1,29 @@
var config = require('../../.aws-config.json');

module.exports = {
  options: {
    key: config.key,
    secret: config.secret,
    bucket: 'download.elasticsearch.org',
    access: 'public-read'
  },

  latest: {
    upload: [
      {
        src: '<%= distDir %>/*.js',
        dest: 'elasticsearch/elasticsearch-js/latest'
      }
    ]
  },

  release: {
    upload: [
      {
        src: '<%= distDir %>/*.js',
        dest: 'elasticsearch/elasticsearch-js/<%= package.version %>'
      }
    ]
  }

};
9 grunt/config/uglify.js (new file)
@@ -0,0 +1,9 @@
module.exports = {
  dist: {
    files: {
      '<%= distDir %>/elasticsearch.min.js': '<%= distDir %>/elasticsearch.js',
      '<%= distDir %>/elasticsearch.angular.min.js': '<%= distDir %>/elasticsearch.angular.js',
      '<%= distDir %>/elasticsearch.jquery.min.js': '<%= distDir %>/elasticsearch.jquery.js'
    }
  }
};
8 grunt/default.js (new file)
@@ -0,0 +1,8 @@
module.exports = function (grunt) {

  // Default task runs the build process.
  grunt.registerTask('default', [
    'build'
  ]);

};
8 grunt/publish.js (new file)
@@ -0,0 +1,8 @@
module.exports = function (grunt) {

  grunt.registerTask('publish', [
    'build',
    's3:latest'
  ]);

};
8 grunt/release.js (new file)
@@ -0,0 +1,8 @@
module.exports = function (grunt) {

  grunt.registerTask('release', [
    'build',
    's3:release'
  ]);

};
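Taken together, the new task files compose simply: default aliases build; build runs clean:dist, browserify, uglify:dist and concat:dist_banners; publish and release both run build and then upload the dist files via s3:latest and s3:release respectively.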
11 package.json
@@ -38,15 +38,22 @@
    "sinon": "~1.7.3",
    "nock": "~0.23.0",
    "open": "0.0.4",
    "testling": "https://github.com/spenceralger/testling/tarball/master"
    "testling": "git://github.com/spenceralger/testling.git",
    "load-grunt-tasks": "~0.2.0",
    "load-grunt-config": "~0.7.0",
    "grunt-s3": "~0.2.0-alpha.3"
  },
  "license": "Apache License",
  "license": "Apache 2.0",
  "dependencies": {
    "when": "~2.6.0",
    "lodash": "~2.3.0",
    "agentkeepalive": "~0.1",
    "chalk": "~0.3.0"
  },
  "repository": {
    "type" : "git",
    "url" : "http://github.com/elasticsearch/elasticsearch-js.git"
  },
  "scripts": {
    "test": "node scripts/run_tests.js --integration --unit --browsers=chrome,safari,firefox,opera",
    "coverage": "mocha test/unit/test_*.js --require blanket -R html-cov > coverage.html && open -a \"Google Chrome\" ./coverage.html",
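The 98% figure in the commit message presumably comes from the coverage npm script shown above, which runs the unit tests under mocha with the blanket html-cov reporter and writes coverage.html; the trailing open -a "Google Chrome" step assumes macOS.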
@@ -15,9 +15,40 @@ module.exports = function (argv, steps) {
  }

  var tasks = {
    exec: function (params, exitCb) {
      var cmd = params.cmd;
      var opts = {};

      if (params.cwd) {
        opts.cwd = path.resolve(params.cwd);
      }

      log('running', cmd, (opts.cwd ? 'in ' + opts.cwd : ''));

      cp.exec(cmd, opts, function (err, stdout, stderr) {
        stdout = stdout.trim();
        stderr = stderr.trim();

        if (err) {
          console.error('Error! status:', err.code, ' -----\n' + err.message);
          process.exit(1);
        }
        else {
          if (argv.verbose) {
            if (stderr) {
              console.error('----------- STDERR -----------');
              console.error(stdout);
              console.error('------------------------------');
            }
            console.log(stdout);
          }
          exitCb();
        }
      });
    },
    run: function (params, exitCb) {
      var cmd = params.cmd;
      var args = params.args;
      var args = params.args || [];
      var opts = {
        stdio: argv.verbose ? 'inherit' : 'ignore'
      };
24 scripts/generate/_force.js (new file)
@@ -0,0 +1,24 @@
var force = process.env.FORCE || process.env.FORCE_GEN;

if (!force) {
  var argv = require('optimist')
    .options({
      force: {
        alias: 'f',
        default: false,
        boolean: true
      }
    });

  if (process.env.npm_config_argv) {
    // when called by NPM
    argv = argv.parse(JSON.parse(process.env.npm_config_argv).original);
  } else {
    // when called directly
    argv = argv.argv;
  }

  force = argv.force;
}

module.exports = force;
59 scripts/generate/js_api/generate.js (new file)
@@ -0,0 +1,59 @@
|
||||
module.exports = function (force) {
|
||||
var _ = require('../../../src/lib/utils');
|
||||
var fs = require('fs');
|
||||
var templates = require('./templates');
|
||||
var restSpecUpdated = require('../../rest_spec_updated');
|
||||
|
||||
var outputPath = _.joinPath(__dirname, '../../../src/lib/api.js');
|
||||
var docOutputDir = _.joinPath(__dirname, '../../../docs/');
|
||||
|
||||
function download() {
|
||||
require('./actions').on('ready', function (actions) {
|
||||
var namespaces = _.filter(_.map(actions, function (action) {
|
||||
if (~action.location.indexOf('.')) {
|
||||
var path = action.location.split('.').slice(0, -1);
|
||||
_.pull(path, 'prototype');
|
||||
return path.join('.');
|
||||
}
|
||||
}));
|
||||
|
||||
// separate the proxy actions
|
||||
var groups = _.groupBy(actions, function (action) {
|
||||
return action.proxy ? 'proxies' : 'normal';
|
||||
});
|
||||
|
||||
fs.unlink(outputPath, function () {
|
||||
console.log('writing', actions.length, 'api actions to', outputPath);
|
||||
|
||||
fs.writeFileSync(outputPath, templates.apiFile({
|
||||
actions: groups.normal,
|
||||
proxies: groups.proxies,
|
||||
namespaces: _.unique(namespaces.sort(), true)
|
||||
}));
|
||||
|
||||
if (!fs.existsSync(docOutputDir)) {
|
||||
fs.mkdirSync(docOutputDir);
|
||||
}
|
||||
|
||||
fs.writeFileSync(docOutputDir + '_method_list.jade', templates.apiMethodList({
|
||||
actions: actions
|
||||
}));
|
||||
|
||||
fs.writeFileSync(docOutputDir + '_methods.jade', templates.apiMethods({
|
||||
actions: actions
|
||||
}));
|
||||
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
if (force) {
|
||||
download();
|
||||
} else {
|
||||
restSpecUpdated(function (err, updated) {
|
||||
if (err || updated) {
|
||||
download();
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
@ -1,53 +1 @@
|
||||
var _ = require('../../../src/lib/utils');
|
||||
var fs = require('fs');
|
||||
var templates = require('./templates');
|
||||
var restSpecUpdated = require('../../rest_spec_updated');
|
||||
|
||||
var outputPath = _.joinPath(__dirname, '../../../src/lib/api.js');
|
||||
var docOutputDir = _.joinPath(__dirname, '../../../docs/');
|
||||
|
||||
function download() {
|
||||
require('./actions').on('ready', function (actions) {
|
||||
var namespaces = _.filter(_.map(actions, function (action) {
|
||||
if (~action.location.indexOf('.')) {
|
||||
var path = action.location.split('.').slice(0, -1);
|
||||
_.pull(path, 'prototype');
|
||||
return path.join('.');
|
||||
}
|
||||
}));
|
||||
|
||||
// separate the proxy actions
|
||||
var groups = _.groupBy(actions, function (action) {
|
||||
return action.proxy ? 'proxies' : 'normal';
|
||||
});
|
||||
|
||||
fs.unlink(outputPath, function () {
|
||||
console.log('writing', actions.length, 'api actions to', outputPath);
|
||||
|
||||
fs.writeFileSync(outputPath, templates.apiFile({
|
||||
actions: groups.normal,
|
||||
proxies: groups.proxies,
|
||||
namespaces: _.unique(namespaces.sort(), true)
|
||||
}));
|
||||
|
||||
if (!fs.existsSync(docOutputDir)) {
|
||||
fs.mkdirSync(docOutputDir);
|
||||
}
|
||||
|
||||
fs.writeFileSync(docOutputDir + '_method_list.jade', templates.apiMethodList({
|
||||
actions: actions
|
||||
}));
|
||||
|
||||
fs.writeFileSync(docOutputDir + '_methods.jade', templates.apiMethods({
|
||||
actions: actions
|
||||
}));
|
||||
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
restSpecUpdated(function (err, updated) {
|
||||
if (err || updated) {
|
||||
download();
|
||||
}
|
||||
});
|
||||
require('./generate')(require('../_force'));
|
||||
44 scripts/generate/yaml_tests/generate.js (new file)
@@ -0,0 +1,44 @@
|
||||
module.exports = function (force) {
|
||||
/**
|
||||
* Check that the test directory exists, and is less than a day old, otherwise wipe it out
|
||||
* and rebuild
|
||||
*/
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var jsYaml = require('js-yaml');
|
||||
var spec = require('../../get_spec');
|
||||
var restSpecUpdated = require('../../rest_spec_updated');
|
||||
|
||||
var testFile = path.resolve(__dirname, '../../../test/integration/yaml_suite/yaml_tests.json');
|
||||
|
||||
function download() {
|
||||
|
||||
var tests = {};
|
||||
|
||||
fs.unlink(testFile, function () {
|
||||
spec.get('test/**/*.yaml')
|
||||
.on('entry', function (entry) {
|
||||
var filename = path.relative('test', entry.path);
|
||||
var file = tests[filename] = [];
|
||||
jsYaml.loadAll(entry.data, function (doc) {
|
||||
file.push(doc);
|
||||
});
|
||||
})
|
||||
.on('end', function () {
|
||||
fs.writeFileSync(testFile, JSON.stringify(tests, null, ' '), 'utf8');
|
||||
console.log('download yaml tests to', testFile);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if (force) {
|
||||
download();
|
||||
} else {
|
||||
restSpecUpdated(function (err, updated) {
|
||||
if (err || updated) {
|
||||
download();
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
@ -1,38 +1 @@
|
||||
/**
|
||||
* Check that the test directory exists, and is less than a day old, otherwise wipe it out
|
||||
* and rebuild
|
||||
*/
|
||||
var fs = require('fs');
|
||||
var path = require('path');
|
||||
var jsYaml = require('js-yaml');
|
||||
var spec = require('../../get_spec');
|
||||
var restSpecUpdated = require('../../rest_spec_updated');
|
||||
|
||||
var testFile = path.resolve(__dirname, '../../../test/integration/yaml_suite/yaml_tests.json');
|
||||
|
||||
function download() {
|
||||
|
||||
var tests = {};
|
||||
|
||||
fs.unlink(testFile, function () {
|
||||
spec.get('test/**/*.yaml')
|
||||
.on('entry', function (entry) {
|
||||
var filename = path.relative('test', entry.path);
|
||||
var file = tests[filename] = [];
|
||||
jsYaml.loadAll(entry.data, function (doc) {
|
||||
file.push(doc);
|
||||
});
|
||||
})
|
||||
.on('end', function () {
|
||||
fs.writeFileSync(testFile, JSON.stringify(tests, null, ' '), 'utf8');
|
||||
console.log('download yaml tests to', testFile);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
restSpecUpdated(function (err, updated) {
|
||||
if (err || updated) {
|
||||
download();
|
||||
}
|
||||
});
|
||||
require('./generate')(require('../_force'));
|
||||
@ -7,7 +7,6 @@ var request = {
|
||||
}
|
||||
};
|
||||
var fs = require('fs');
|
||||
var _ = require('lodash');
|
||||
|
||||
var lastRestSpecUpdateFile = __dirname + '/last_rest_spec_update.sha';
|
||||
var lastRestSpecUpdate;
|
||||
@ -17,61 +16,46 @@ if (fs.existsSync(lastRestSpecUpdateFile)) {
|
||||
lastRestSpecUpdate = fs.readFileSync(lastRestSpecUpdateFile, 'utf8');
|
||||
}
|
||||
|
||||
var req = null;
|
||||
var force = false;
|
||||
var req = https.get(request, function (incoming) {
|
||||
if (incoming.statusCode !== 200) {
|
||||
req.abort();
|
||||
console.error('request for last commit failed', incoming.statusCode, incoming.headers);
|
||||
return;
|
||||
}
|
||||
|
||||
if (process.env.npm_config_force ||
|
||||
process.env.FORCE_GEN ||
|
||||
_.contains(process.argv, '-f') ||
|
||||
_.contains(process.argv, '--force')
|
||||
) {
|
||||
force = true;
|
||||
}
|
||||
var body = '';
|
||||
|
||||
if (force) {
|
||||
updated = true;
|
||||
} else {
|
||||
req = https.get(request, function (incoming) {
|
||||
if (incoming.statusCode !== 200) {
|
||||
req.abort();
|
||||
console.error('request for last commit failed', incoming.statusCode, incoming.headers);
|
||||
incoming.on('data', onData);
|
||||
incoming.on('end', onEnd);
|
||||
|
||||
function onData(chunk) {
|
||||
body += chunk;
|
||||
}
|
||||
|
||||
function onEnd() {
|
||||
incoming.removeListener('data', onData);
|
||||
incoming.removeListener('end', onEnd);
|
||||
var _req = req;
|
||||
req = null;
|
||||
|
||||
var resp;
|
||||
try {
|
||||
resp = JSON.parse(body);
|
||||
} catch (e) {
|
||||
console.log('unable to parse response from github');
|
||||
_req.emit('ready');
|
||||
return;
|
||||
}
|
||||
|
||||
var body = '';
|
||||
|
||||
incoming.on('data', onData);
|
||||
incoming.on('end', onEnd);
|
||||
|
||||
function onData(chunk) {
|
||||
body += chunk;
|
||||
if (lastRestSpecUpdate === resp.sha) {
|
||||
updated = false;
|
||||
} else {
|
||||
updated = true;
|
||||
fs.writeFileSync(lastRestSpecUpdateFile, resp.sha);
|
||||
}
|
||||
|
||||
function onEnd() {
|
||||
incoming.removeListener('data', onData);
|
||||
incoming.removeListener('end', onEnd);
|
||||
var _req = req;
|
||||
req = null;
|
||||
|
||||
var resp;
|
||||
try {
|
||||
resp = JSON.parse(body);
|
||||
} catch (e) {
|
||||
console.log('unable to parse response from github');
|
||||
_req.emit('ready');
|
||||
return;
|
||||
}
|
||||
|
||||
if (lastRestSpecUpdate === resp.sha) {
|
||||
updated = false;
|
||||
} else {
|
||||
updated = true;
|
||||
fs.writeFileSync(lastRestSpecUpdateFile, resp.sha);
|
||||
}
|
||||
_req.emit('ready');
|
||||
}
|
||||
});
|
||||
}
|
||||
_req.emit('ready');
|
||||
}
|
||||
});
|
||||
|
||||
module.exports = function (cb) {
|
||||
function done() {
|
||||
|
||||
37 scripts/run_in_bower_packages.js (new file)
@@ -0,0 +1,37 @@
|
||||
var fs = require('fs');
|
||||
|
||||
var argv = require('optimist')
|
||||
.default({
|
||||
verbose: false
|
||||
})
|
||||
.alias({
|
||||
v: 'verbose'
|
||||
})
|
||||
.argv;
|
||||
|
||||
var steps = [];
|
||||
|
||||
var cmd = argv._.join(' ');
|
||||
|
||||
if (!cmd) {
|
||||
throw new Error('you should specify a command...');
|
||||
}
|
||||
|
||||
['browser', 'jquery', 'angular'].forEach(function (build) {
|
||||
|
||||
if (!fs.existsSync('../bower-elasticsearch-' + build) ||
|
||||
!fs.existsSync('../bower-elasticsearch-' + build + '/.git')
|
||||
) {
|
||||
throw new Error('Ensure that all of the bower repos are checked out next to this repo');
|
||||
}
|
||||
|
||||
steps.push([
|
||||
'exec', {
|
||||
cmd: cmd,
|
||||
cwd: '../bower-elasticsearch-' + build
|
||||
}
|
||||
]);
|
||||
|
||||
});
|
||||
|
||||
require('./_steps')(argv, steps);
|
||||
@ -14,6 +14,7 @@ angular.module('elasticsearch.client', [])
|
||||
.factory('esFactory', ['$http', '$q', function ($http, $q) {
|
||||
|
||||
AngularConnector.prototype.$http = $http;
|
||||
AngularConnector.prototype.$q = $q;
|
||||
|
||||
var factory = function (config) {
|
||||
config = config || {};
|
||||
|
||||
@ -812,9 +812,10 @@ api.exists = ca({
|
||||
* @param {String} params.preference - Specify the node or shard the operation should be performed on (default: random)
|
||||
* @param {String} params.q - Query in the Lucene query string syntax
|
||||
* @param {String} params.routing - Specific routing value
|
||||
* @param {String, String[], Boolean} params.source - True or false to return the _source field or not, or a list of fields to return
|
||||
* @param {String, String[], Boolean} params.sourceExclude - A list of fields to exclude from the returned _source field
|
||||
* @param {String, String[], Boolean} params.sourceInclude - A list of fields to extract and return from the _source field
|
||||
* @param {String} params.source - The URL-encoded query definition (instead of using the request body)
|
||||
* @param {String, String[], Boolean} params._source - True or false to return the _source field or not, or a list of fields to return
|
||||
* @param {String, String[], Boolean} params._sourceExclude - A list of fields to exclude from the returned _source field
|
||||
* @param {String, String[], Boolean} params._sourceInclude - A list of fields to extract and return from the _source field
|
||||
* @param {String} params.id - The document ID
|
||||
* @param {String} params.index - The name of the index
|
||||
* @param {String} params.type - The type of the document
|
||||
@ -863,14 +864,16 @@ api.explain = ca({
|
||||
type: 'string'
|
||||
},
|
||||
source: {
|
||||
type: 'list',
|
||||
name: '_source'
|
||||
type: 'string'
|
||||
},
|
||||
sourceExclude: {
|
||||
_source: {
|
||||
type: 'list'
|
||||
},
|
||||
_sourceExclude: {
|
||||
type: 'list',
|
||||
name: '_source_exclude'
|
||||
},
|
||||
sourceInclude: {
|
||||
_sourceInclude: {
|
||||
type: 'list',
|
||||
name: '_source_include'
|
||||
}
|
||||
@ -904,9 +907,9 @@ api.explain = ca({
|
||||
* @param {Boolean} params.realtime - Specify whether to perform the operation in realtime or search mode
|
||||
* @param {Boolean} params.refresh - Refresh the shard containing the document before performing the operation
|
||||
* @param {String} params.routing - Specific routing value
|
||||
* @param {String, String[], Boolean} params.source - True or false to return the _source field or not, or a list of fields to return
|
||||
* @param {String, String[], Boolean} params.sourceExclude - A list of fields to exclude from the returned _source field
|
||||
* @param {String, String[], Boolean} params.sourceInclude - A list of fields to extract and return from the _source field
|
||||
* @param {String, String[], Boolean} params._source - True or false to return the _source field or not, or a list of fields to return
|
||||
* @param {String, String[], Boolean} params._sourceExclude - A list of fields to exclude from the returned _source field
|
||||
* @param {String, String[], Boolean} params._sourceInclude - A list of fields to extract and return from the _source field
|
||||
* @param {String} params.id - The document ID
|
||||
* @param {String} params.index - The name of the index
|
||||
* @param {String} [params.type=_all] - The type of the document (use `_all` to fetch the first document matching the ID across all types)
|
||||
@ -931,15 +934,14 @@ api.get = ca({
|
||||
routing: {
|
||||
type: 'string'
|
||||
},
|
||||
source: {
|
||||
type: 'list',
|
||||
name: '_source'
|
||||
_source: {
|
||||
type: 'list'
|
||||
},
|
||||
sourceExclude: {
|
||||
_sourceExclude: {
|
||||
type: 'list',
|
||||
name: '_source_exclude'
|
||||
},
|
||||
sourceInclude: {
|
||||
_sourceInclude: {
|
||||
type: 'list',
|
||||
name: '_source_include'
|
||||
}
|
||||
@ -2629,9 +2631,9 @@ api.info = ca({
|
||||
* @param {String} params.preference - Specify the node or shard the operation should be performed on (default: random)
|
||||
* @param {Boolean} params.realtime - Specify whether to perform the operation in realtime or search mode
|
||||
* @param {Boolean} params.refresh - Refresh the shard containing the document before performing the operation
|
||||
* @param {String, String[], Boolean} params.source - True or false to return the _source field or not, or a list of fields to return
|
||||
* @param {String, String[], Boolean} params.sourceExclude - A list of fields to exclude from the returned _source field
|
||||
* @param {String, String[], Boolean} params.sourceInclude - A list of fields to extract and return from the _source field
|
||||
* @param {String, String[], Boolean} params._source - True or false to return the _source field or not, or a list of fields to return
|
||||
* @param {String, String[], Boolean} params._sourceExclude - A list of fields to exclude from the returned _source field
|
||||
* @param {String, String[], Boolean} params._sourceInclude - A list of fields to extract and return from the _source field
|
||||
* @param {String} params.index - The name of the index
|
||||
* @param {String} params.type - The type of the document
|
||||
*/
|
||||
@ -2649,15 +2651,14 @@ api.mget = ca({
|
||||
refresh: {
|
||||
type: 'boolean'
|
||||
},
|
||||
source: {
|
||||
type: 'list',
|
||||
name: '_source'
|
||||
_source: {
|
||||
type: 'list'
|
||||
},
|
||||
sourceExclude: {
|
||||
_sourceExclude: {
|
||||
type: 'list',
|
||||
name: '_source_exclude'
|
||||
},
|
||||
sourceInclude: {
|
||||
_sourceInclude: {
|
||||
type: 'list',
|
||||
name: '_source_include'
|
||||
}
|
||||
@ -2953,9 +2954,10 @@ api.scroll = ca({
|
||||
* @param {String} params.searchType - Search operation type
|
||||
* @param {Number} params.size - Number of hits to return (default: 10)
|
||||
* @param {String, String[], Boolean} params.sort - A comma-separated list of <field>:<direction> pairs
|
||||
* @param {String, String[], Boolean} params.source - True or false to return the _source field or not, or a list of fields to return
|
||||
* @param {String, String[], Boolean} params.sourceExclude - A list of fields to exclude from the returned _source field
|
||||
* @param {String, String[], Boolean} params.sourceInclude - A list of fields to extract and return from the _source field
|
||||
* @param {String} params.source - The URL-encoded request definition using the Query DSL (instead of using request body)
|
||||
* @param {String, String[], Boolean} params._source - True or false to return the _source field or not, or a list of fields to return
|
||||
* @param {String, String[], Boolean} params._sourceExclude - A list of fields to exclude from the returned _source field
|
||||
* @param {String, String[], Boolean} params._sourceInclude - A list of fields to extract and return from the _source field
|
||||
* @param {String, String[], Boolean} params.stats - Specific 'tag' of the request for logging and statistical purposes
|
||||
* @param {String} params.suggestField - Specify which field to use for suggestions
|
||||
* @param {String} [params.suggestMode=missing] - Specify suggest mode
|
||||
@ -3047,14 +3049,16 @@ api.search = ca({
|
||||
type: 'list'
|
||||
},
|
||||
source: {
|
||||
type: 'list',
|
||||
name: '_source'
|
||||
type: 'string'
|
||||
},
|
||||
sourceExclude: {
|
||||
_source: {
|
||||
type: 'list'
|
||||
},
|
||||
_sourceExclude: {
|
||||
type: 'list',
|
||||
name: '_source_exclude'
|
||||
},
|
||||
sourceInclude: {
|
||||
_sourceInclude: {
|
||||
type: 'list',
|
||||
name: '_source_include'
|
||||
},
|
||||
|
||||
@ -32,7 +32,7 @@ function ConnectionPool(config) {
|
||||
// a map of connections to their "id" property, used when sniffing
|
||||
this.index = {};
|
||||
|
||||
this.connections = {
|
||||
this._conns = {
|
||||
alive: [],
|
||||
dead: []
|
||||
};
|
||||
@ -59,18 +59,18 @@ delete ConnectionPool.connectionClasses._default;
|
||||
* @return {[type]} [description]
|
||||
*/
|
||||
ConnectionPool.prototype.select = function (cb) {
|
||||
if (this.connections.alive.length) {
|
||||
if (this._conns.alive.length) {
|
||||
if (this.selector.length > 1) {
|
||||
this.selector(this.connections.alive, cb);
|
||||
this.selector(this._conns.alive, cb);
|
||||
} else {
|
||||
try {
|
||||
_.nextTick(cb, null, this.selector(this.connections.alive));
|
||||
_.nextTick(cb, null, this.selector(this._conns.alive));
|
||||
} catch (e) {
|
||||
cb(e);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
_.nextTick(cb, null, this.connections.dead[0]);
|
||||
_.nextTick(cb, null, this.getConnection());
|
||||
}
|
||||
};
|
||||
|
||||
@ -88,19 +88,19 @@ ConnectionPool.prototype.onStatusSet = _.handler(function (status, oldStatus, co
|
||||
|
||||
switch (status) {
|
||||
case 'alive':
|
||||
from = this.connections.dead;
|
||||
to = this.connections.alive;
|
||||
from = this._conns.dead;
|
||||
to = this._conns.alive;
|
||||
break;
|
||||
case 'dead':
|
||||
from = this.connections.alive;
|
||||
to = this.connections.dead;
|
||||
from = this._conns.alive;
|
||||
to = this._conns.dead;
|
||||
break;
|
||||
case 'redead':
|
||||
from = this.connections.dead;
|
||||
to = this.connections.dead;
|
||||
from = this._conns.dead;
|
||||
to = this._conns.dead;
|
||||
break;
|
||||
case 'closed':
|
||||
from = this.connections[oldStatus];
|
||||
from = this._conns[oldStatus];
|
||||
break;
|
||||
}
|
||||
|
||||
@ -119,6 +119,23 @@ ConnectionPool.prototype.onStatusSet = _.handler(function (status, oldStatus, co
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Fetches the first active connection, falls back to dead connections
|
||||
* This is really only here for testing purposes
|
||||
*
|
||||
* @private
|
||||
* @return {Connection} - Some connection
|
||||
*/
|
||||
ConnectionPool.prototype.getConnection = function () {
|
||||
if (this._conns.alive.length) {
|
||||
return this._conns.alive[0];
|
||||
}
|
||||
|
||||
if (this._conns.dead.length) {
|
||||
return this._conns.dead[0];
|
||||
}
|
||||
};
|
||||
|
||||
ConnectionPool.prototype.addConnection = function (connection) {
|
||||
if (!connection.id) {
|
||||
connection.id = connection.host.toString();
|
||||
|
||||
8 src/lib/connectors/angular.js (vendored)
@@ -16,18 +16,24 @@ function AngularConnector(host, config) {
|
||||
_.inherits(AngularConnector, ConnectionAbstract);
|
||||
|
||||
AngularConnector.prototype.request = function (params, cb) {
|
||||
var abort = this.$q.defer();
|
||||
this.$http({
|
||||
method: params.method,
|
||||
url: this.host.makeUrl(params),
|
||||
data: params.body,
|
||||
cache: false,
|
||||
timeout: _.has(params, 'requestTimeout') ? this.requestTimeout : 10000
|
||||
timeout: abort.promise
|
||||
}).then(function (response) {
|
||||
cb(null, response.data, response.status);
|
||||
}, function (err) {
|
||||
cb(new ConnectionFault(err.message));
|
||||
});
|
||||
|
||||
return function () {
|
||||
abort.resolve();
|
||||
};
|
||||
};
|
||||
|
||||
// must be overwritten before this connection can be used
|
||||
AngularConnector.prototype.$http = null;
|
||||
AngularConnector.prototype.$q = null;
|
||||
|
||||
@ -12,7 +12,7 @@ _.each(opts, function (conn, name) {
|
||||
}
|
||||
});
|
||||
|
||||
// custom __default specification
|
||||
// custom _default specification
|
||||
if (opts.xhr) {
|
||||
opts._default = 'xhr';
|
||||
} else if (opts.angular) {
|
||||
|
||||
@ -13,7 +13,6 @@ var handles = {
|
||||
https: require('https')
|
||||
};
|
||||
var _ = require('../utils');
|
||||
var errors = require('../errors');
|
||||
var qs = require('querystring');
|
||||
var KeepAliveAgent = require('agentkeepalive');
|
||||
var ConnectionAbstract = require('../connection');
|
||||
@ -88,7 +87,6 @@ HttpConnector.prototype.request = function (params, cb) {
|
||||
var request;
|
||||
var response;
|
||||
var status = 0;
|
||||
var requestTimeout = _.has(params, 'requestTimeout') ? this.requestTimeout : 10000;
|
||||
var log = this.log;
|
||||
|
||||
var reqParams = this.makeReqParams(params);
|
||||
@ -105,7 +103,6 @@ HttpConnector.prototype.request = function (params, cb) {
|
||||
err = void 0;
|
||||
} else {
|
||||
log.error(err);
|
||||
this.setStatus('dead');
|
||||
}
|
||||
|
||||
log.trace(params.method, reqParams, params.body, response, status);
|
||||
@ -132,14 +129,6 @@ HttpConnector.prototype.request = function (params, cb) {
|
||||
|
||||
request.on('error', cleanUp);
|
||||
|
||||
if (requestTimeout) {
|
||||
// timeout for the entire request.
|
||||
timeoutId = setTimeout(function () {
|
||||
request.abort();
|
||||
request.emit('error', new errors.RequestTimeout('Request timed out at ' + requestTimeout + 'ms'));
|
||||
}, requestTimeout);
|
||||
}
|
||||
|
||||
request.setNoDelay(true);
|
||||
request.setSocketKeepAlive(true);
|
||||
request.chunkedEncoding = false;
|
||||
@ -151,4 +140,8 @@ HttpConnector.prototype.request = function (params, cb) {
|
||||
request.end();
|
||||
}
|
||||
this.requestCount++;
|
||||
|
||||
return function () {
|
||||
request.abort();
|
||||
};
|
||||
};
|
||||
|
||||
7 src/lib/connectors/jquery.js (vendored)
@@ -14,7 +14,6 @@ JqueryConnector.prototype.request = function (params, cb) {
|
||||
data: params.body,
|
||||
dataType: 'json',
|
||||
headers: params.headers,
|
||||
timeout: params.requestTimeout,
|
||||
done: cb
|
||||
};
|
||||
|
||||
@ -24,7 +23,11 @@ JqueryConnector.prototype.request = function (params, cb) {
|
||||
ajax.password = auths[1];
|
||||
}
|
||||
|
||||
return jQuery.ajax(ajax);
|
||||
var jqXhr = jQuery.ajax(ajax);
|
||||
|
||||
return function () {
|
||||
jqXhr.abort();
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
|
||||
@ -10,7 +10,6 @@ module.exports = XhrConnector;
|
||||
var _ = require('../utils');
|
||||
var ConnectionAbstract = require('../connection');
|
||||
var ConnectionFault = require('../errors').ConnectionFault;
|
||||
var TimeoutError = require('../errors').RequestTimeout;
|
||||
var asyncDefault = !(navigator && /PhantomJS/i.test(navigator.userAgent));
|
||||
|
||||
function XhrConnector(host, config) {
|
||||
@ -50,7 +49,6 @@ if (!getXhr) {
|
||||
|
||||
XhrConnector.prototype.request = function (params, cb) {
|
||||
var xhr = getXhr();
|
||||
var requestTimeout = _.has(params, 'requestTimeout') ? this.requestTimeout : 10000;
|
||||
var timeoutId;
|
||||
var url = this.host.makeUrl(params);
|
||||
var log = this.log;
|
||||
@ -71,13 +69,9 @@ XhrConnector.prototype.request = function (params, cb) {
|
||||
}
|
||||
};
|
||||
|
||||
if (requestTimeout) {
|
||||
timeoutId = setTimeout(function () {
|
||||
xhr.onreadystatechange = _.noop;
|
||||
xhr.abort();
|
||||
cb(new TimeoutError());
|
||||
}, requestTimeout);
|
||||
}
|
||||
|
||||
xhr.send(params.body || void 0);
|
||||
|
||||
return function () {
|
||||
xhr.abort();
|
||||
};
|
||||
};
|
||||
|
||||
@ -22,6 +22,10 @@ function Log(config) {
|
||||
var i;
|
||||
var outputs;
|
||||
|
||||
if (config.loggers) {
|
||||
config.log = config.loggers;
|
||||
}
|
||||
|
||||
if (config.log) {
|
||||
if (_.isArrayOfStrings(config.log)) {
|
||||
outputs = [{
|
||||
|
||||
@ -34,9 +34,8 @@ Console.prototype.setupListeners = function (levels) {
|
||||
};
|
||||
|
||||
Console.prototype.write = function (label, message, to) {
|
||||
/* jshint browser:true */
|
||||
if (window.console && window.console[to]) {
|
||||
window.console[to](this.format(label, message));
|
||||
if (console[to]) {
|
||||
console[to](this.format(label, message));
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@ -12,7 +12,7 @@ var when = require('when');
|
||||
function Transport(config) {
|
||||
config = config || {};
|
||||
|
||||
var LogClass = _.funcEnum(config, 'logClass', Transport.logs, 'main');
|
||||
var LogClass = (typeof config.log === 'function') ? config.log : require('./log');
|
||||
config.log = this.log = new LogClass(config);
|
||||
|
||||
// overwrite the createDefer method if a new implementation is provided
|
||||
@ -34,22 +34,32 @@ function Transport(config) {
|
||||
// setup max retries
|
||||
this.maxRetries = config.hasOwnProperty('maxRetries') ? config.maxRetries : 3;
|
||||
|
||||
// setup requestTimeout default
|
||||
this.requestTimeout = config.hasOwnProperty('requestTimeout') ? config.requestTimeout : 10000;
|
||||
|
||||
// randomizeHosts option
|
||||
var randomizeHosts = config.hasOwnProperty('randomizeHosts') ? !!config.randomizeHosts : true;
|
||||
|
||||
if (config.host) {
|
||||
config.hosts = config.host;
|
||||
}
|
||||
|
||||
if (config.hosts) {
|
||||
var hostsConfig = _.createArray(config.hosts, function (val) {
|
||||
if (_.isPlainObject(val) || _.isString(val)) {
|
||||
if (_.isPlainObject(val) || _.isString(val) || val instanceof Host) {
|
||||
return val;
|
||||
}
|
||||
});
|
||||
if (!hostsConfig) {
|
||||
throw new Error('Invalid hosts config. Expected a URL, an array of urls, a host config object, or an array of ' +
|
||||
'host config objects.');
|
||||
throw new TypeError('Invalid hosts config. Expected a URL, an array of urls, a host config object, ' +
|
||||
'or an array of host config objects.');
|
||||
}
|
||||
|
||||
var hosts = _.map(hostsConfig, function (conf) {
|
||||
return new Host(conf);
|
||||
return (conf instanceof Host) ? conf : new Host(conf);
|
||||
});
|
||||
|
||||
if (config.randomizeHosts) {
|
||||
if (randomizeHosts) {
|
||||
hosts = _.shuffle(hosts);
|
||||
}
|
||||
|
||||
@ -65,10 +75,6 @@ Transport.serializers = {
|
||||
json: require('./serializers/json')
|
||||
};
|
||||
|
||||
Transport.logs = {
|
||||
main: require('./log')
|
||||
};
|
||||
|
||||
Transport.nodesToHostCallbacks = {
|
||||
main: require('./nodes_to_host')
|
||||
};
|
||||
@ -91,8 +97,12 @@ Transport.prototype.request = function (params, cb) {
|
||||
var self = this;
|
||||
var remainingRetries = this.maxRetries;
|
||||
var connection; // set in sendReqWithConnection
|
||||
var connectionReq; // an object with an abort method, set in sendReqWithConnection
|
||||
var request; // the object returned to the user, might be a deferred
|
||||
var aborted = false; // several connectors will respond with an error when the request is aborted
|
||||
var requestAbort; // an abort function, returned by connection#request()
|
||||
var requestTimeout; // the general timeout for the total request (including all retries)
|
||||
var requestTimeoutId; // the id of the ^timeout
|
||||
var request; // the object returned to the user, might be a promise
|
||||
var defer; // the defer object, will be set when we are using promises.
|
||||
|
||||
self.log.debug('starting request', params);
|
||||
|
||||
@ -107,26 +117,22 @@ Transport.prototype.request = function (params, cb) {
|
||||
}
|
||||
|
||||
params.req = {
|
||||
requestTimeout: params.requestTimeout,
|
||||
method: params.method,
|
||||
path: params.path,
|
||||
query: params.query,
|
||||
body: params.body,
|
||||
};
|
||||
|
||||
self.connectionPool.select(sendReqWithConnection);
|
||||
|
||||
function abortRequest() {
|
||||
remainingRetries = 0;
|
||||
connectionReq.abort();
|
||||
}
|
||||
|
||||
function sendReqWithConnection(err, _connection) {
|
||||
if (aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (err) {
|
||||
respond(err);
|
||||
} else if (_connection) {
|
||||
connection = _connection;
|
||||
connectionReq = connection.request(params.req, checkRespForFailure);
|
||||
requestAbort = connection.request(params.req, checkRespForFailure);
|
||||
} else {
|
||||
self.log.warning('No living connections');
|
||||
respond(new errors.NoConnections());
|
||||
@ -134,17 +140,31 @@ Transport.prototype.request = function (params, cb) {
|
||||
}
|
||||
|
||||
function checkRespForFailure(err, body, status) {
|
||||
if (err && remainingRetries) {
|
||||
remainingRetries--;
|
||||
self.log.error(err.message, '-- retrying');
|
||||
self.connectionPool.select(sendReqWithConnection);
|
||||
if (aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (err) {
|
||||
connection.setStatus('dead');
|
||||
if (remainingRetries) {
|
||||
remainingRetries--;
|
||||
self.log.error('Request error, retrying --', err.message);
|
||||
self.connectionPool.select(sendReqWithConnection);
|
||||
} else {
|
||||
self.log.error('Request complete with error --', err.message);
|
||||
respond(new errors.ConnectionFault(err));
|
||||
}
|
||||
} else {
|
||||
self.log.info('Request complete');
|
||||
respond(err ? new errors.ConnectionFault() : void 0, body, status);
|
||||
respond(void 0, body, status);
|
||||
}
|
||||
}
|
||||
|
||||
function respond(err, body, status) {
|
||||
if (aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
var parsedBody;
|
||||
|
||||
if (!err && body) {
|
||||
@ -154,18 +174,20 @@ Transport.prototype.request = function (params, cb) {
|
||||
}
|
||||
}
|
||||
|
||||
if (!err) {
|
||||
if ((status < 200 || status >= 300)
|
||||
&& (!params.ignore || !_.contains(params.ignore, status))
|
||||
) {
|
||||
if (errors[status]) {
|
||||
err = new errors[status](parsedBody && parsedBody.error);
|
||||
} else {
|
||||
err = new errors.Generic('unknown error');
|
||||
}
|
||||
// does the response represent an error?
|
||||
if (
|
||||
(!err || err instanceof errors.Serialization)
|
||||
&& (status < 200 || status >= 300)
|
||||
&& (!params.ignore || !_.contains(params.ignore, status))
|
||||
) {
|
||||
if (errors[status]) {
|
||||
err = new errors[status](parsedBody && parsedBody.error);
|
||||
} else {
|
||||
err = new errors.Generic('unknown error');
|
||||
}
|
||||
}
|
||||
|
||||
// how do we parse the body?
|
||||
if (params.castExists) {
|
||||
if (err && err instanceof errors.NotFound) {
|
||||
parsedBody = false;
|
||||
@ -175,29 +197,59 @@ Transport.prototype.request = function (params, cb) {
|
||||
}
|
||||
}
|
||||
|
||||
// how do we send the response?
|
||||
if (typeof cb === 'function') {
|
||||
cb(err, parsedBody, status);
|
||||
if (err) {
|
||||
cb(err);
|
||||
} else {
|
||||
cb(void 0, parsedBody, status);
|
||||
}
|
||||
} else if (err) {
|
||||
request.reject(err);
|
||||
defer.reject(err);
|
||||
} else {
|
||||
request.resolve({
|
||||
defer.resolve({
|
||||
body: parsedBody,
|
||||
status: status
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// determine the API based on the presence of a callback
|
||||
function abortRequest() {
|
||||
if (aborted) {
|
||||
return;
|
||||
}
|
||||
|
||||
aborted = true;
|
||||
remainingRetries = 0;
|
||||
clearTimeout(requestTimeoutId);
|
||||
if (typeof requestAbort === 'function') {
|
||||
requestAbort();
|
||||
}
|
||||
}
|
||||
|
||||
// set the requestTimeout
|
||||
requestTimeout = params.hasOwnProperty('requestTimeout') ? params.requestTimeout : this.requestTimeout;
|
||||
|
||||
if (requestTimeout && requestTimeout !== Infinity) {
|
||||
requestTimeoutId = setTimeout(function () {
|
||||
respond(new errors.RequestTimeout());
|
||||
abortRequest();
|
||||
}, requestTimeout);
|
||||
}
|
||||
|
||||
// determine the response based on the presence of a callback
|
||||
if (typeof cb === 'function') {
|
||||
request = {
|
||||
abort: abortRequest
|
||||
};
|
||||
} else {
|
||||
var defer = this.createDefer();
|
||||
defer.promise.abort = abortRequest;
|
||||
defer = this.createDefer();
|
||||
request = defer.promise;
|
||||
request.abort = abortRequest;
|
||||
}
|
||||
|
||||
self.connectionPool.select(sendReqWithConnection);
|
||||
|
||||
return request;
|
||||
};
|
||||
|
||||
@ -232,6 +284,10 @@ Transport.prototype.sniff = function (cb) {
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Close the Transport, which closes the logs and connection pool
|
||||
* @return {[type]} [description]
|
||||
*/
|
||||
Transport.prototype.close = function () {
|
||||
this.log.close();
|
||||
this.connectionPool.close();
|
||||
|
||||
@ -508,14 +508,14 @@ describe('Client Action runner', function () {
|
||||
two: '-69',
|
||||
three: 15,
|
||||
four: now,
|
||||
five: new Date(999, 2399, 152433)
|
||||
five: new Date('2013-03-01T01:10:00Z')
|
||||
}, function (err, params) {
|
||||
if (err) { throw err; }
|
||||
params.query.one.should.equal('42');
|
||||
params.query.two.should.equal('-69');
|
||||
params.query.three.should.equal('15');
|
||||
params.query.four.should.equal('' + now.getTime());
|
||||
params.query.five.should.equal('-11162948400000');
|
||||
params.query.five.should.equal('1362100200000');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
@ -39,8 +39,8 @@ describe('Connection Pool', function () {
|
||||
|
||||
_.keys(pool.index).should.eql([host.toString()]);
|
||||
|
||||
pool.connections.alive.should.eql([connection]);
|
||||
pool.connections.dead.should.eql([]);
|
||||
pool._conns.alive.should.eql([connection]);
|
||||
pool._conns.dead.should.eql([]);
|
||||
});
|
||||
|
||||
describe('#removeConnection', function () {
|
||||
@ -48,8 +48,8 @@ describe('Connection Pool', function () {
|
||||
pool.addConnection(connection);
|
||||
pool.removeConnection(connection2);
|
||||
|
||||
pool.connections.alive.should.eql([connection]);
|
||||
pool.connections.dead.should.eql([]);
|
||||
pool._conns.alive.should.eql([connection]);
|
||||
pool._conns.dead.should.eql([]);
|
||||
_.keys(pool.index).length.should.eql(1);
|
||||
});
|
||||
|
||||
@ -68,8 +68,8 @@ describe('Connection Pool', function () {
|
||||
it('#setHosts syncs the list of Hosts with the connections in the index', function () {
|
||||
// there should now be two connections
|
||||
pool.setHosts([host, host2]);
|
||||
pool.connections.alive.length.should.eql(2);
|
||||
pool.connections.dead.length.should.eql(0);
|
||||
pool._conns.alive.length.should.eql(2);
|
||||
pool._conns.dead.length.should.eql(0);
|
||||
|
||||
// get the new connections
|
||||
connection = pool.index[host.toString()];
|
||||
@ -77,13 +77,13 @@ describe('Connection Pool', function () {
|
||||
|
||||
// should remove the second connection
|
||||
pool.setHosts([host]);
|
||||
pool.connections.alive.should.eql([connection]);
|
||||
pool.connections.dead.length.should.eql(0);
|
||||
pool._conns.alive.should.eql([connection]);
|
||||
pool._conns.dead.length.should.eql(0);
|
||||
|
||||
// should skip the first, but create a new for the second
|
||||
pool.setHosts([host, host2]);
|
||||
pool.connections.alive.length.should.eql(2);
|
||||
pool.connections.dead.length.should.eql(0);
|
||||
pool._conns.alive.length.should.eql(2);
|
||||
pool._conns.dead.length.should.eql(0);
|
||||
|
||||
// a new connection should have been created
|
||||
pool.index[host2.toString()].should.not.be.exactly(connection2);
|
||||
@ -157,8 +157,8 @@ describe('Connection Pool', function () {
|
||||
});
|
||||
|
||||
it('should automatically select the first dead connection when there no living connections', function (done) {
|
||||
pool.connections.alive = [];
|
||||
pool.connections.dead = [1, 2, 3];
|
||||
pool._conns.alive = [];
|
||||
pool._conns.dead = [1, 2, 3];
|
||||
|
||||
pool.select(function (err, selection) {
|
||||
selection.should.be.exactly(1);
|
||||
@ -184,15 +184,15 @@ describe('Connection Pool', function () {
|
||||
connection = pool.index[host2.toString()];
|
||||
connection2 = pool.index[host2.toString()];
|
||||
|
||||
pool.connections.alive.should.have.length(2);
|
||||
pool.connections.dead.should.have.length(0);
|
||||
pool._conns.alive.should.have.length(2);
|
||||
pool._conns.dead.should.have.length(0);
|
||||
});
|
||||
|
||||
it('moves an alive connection to dead', function () {
|
||||
connection.setStatus('dead');
|
||||
|
||||
pool.connections.alive.should.have.length(1);
|
||||
pool.connections.dead.should.have.length(1);
|
||||
pool._conns.alive.should.have.length(1);
|
||||
pool._conns.dead.should.have.length(1);
|
||||
});
|
||||
|
||||
it('moves a dead connection to the end of the dead list when it re-dies', function () {
|
||||
@ -200,34 +200,34 @@ describe('Connection Pool', function () {
|
||||
connection2.setStatus('dead');
|
||||
|
||||
// connection is at the front of the line
|
||||
pool.connections.dead[0].should.be.exactly(connection);
|
||||
pool._conns.dead[0].should.be.exactly(connection);
|
||||
// it re-dies
|
||||
connection.setStatus('dead');
|
||||
// connection2 is now at the front of the list
|
||||
pool.connections.dead[0].should.be.exactly(connection2);
|
||||
pool._conns.dead[0].should.be.exactly(connection2);
|
||||
});
|
||||
|
||||
it('does nothing when a connection is re-alive', function () {
|
||||
var last = pool.connections.alive[pool.connections.alive.length - 1];
|
||||
var first = pool.connections.alive[0];
|
||||
var last = pool._conns.alive[pool._conns.alive.length - 1];
|
||||
var first = pool._conns.alive[0];
|
||||
|
||||
last.should.not.be.exactly(first);
|
||||
|
||||
// first re-alives
|
||||
first.setStatus('alive');
|
||||
pool.connections.alive[0].should.be.exactly(first);
|
||||
pool.connections.alive[pool.connections.alive.length - 1].should.be.exactly(last);
|
||||
pool._conns.alive[0].should.be.exactly(first);
|
||||
pool._conns.alive[pool._conns.alive.length - 1].should.be.exactly(last);
|
||||
|
||||
// last re-alives
|
||||
last.setStatus('alive');
|
||||
pool.connections.alive[0].should.be.exactly(first);
|
||||
pool.connections.alive[pool.connections.alive.length - 1].should.be.exactly(last);
|
||||
pool._conns.alive[0].should.be.exactly(first);
|
||||
pool._conns.alive[pool._conns.alive.length - 1].should.be.exactly(last);
|
||||
});
|
||||
|
||||
it('removes all its connection when it closes, causing them to be closed', function () {
|
||||
pool.close();
|
||||
pool.connections.alive.should.have.length(0);
|
||||
pool.connections.dead.should.have.length(0);
|
||||
pool._conns.alive.should.have.length(0);
|
||||
pool._conns.dead.should.have.length(0);
|
||||
|
||||
connection.status.should.eql('closed');
|
||||
connection2.status.should.eql('closed');
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
var Log = require('../../src/lib/log');
|
||||
var StdioLogger = require('../../src/lib/loggers/console');
|
||||
var ConsoleLogger = require('../../src/lib/loggers/console');
|
||||
var sinon = require('sinon');
|
||||
var parentLog;
|
||||
|
||||
@ -16,7 +16,7 @@ function makeLogger(parent, levels) {
|
||||
var config = {
|
||||
levels: Log.parseLevels(levels || 'trace')
|
||||
};
|
||||
return new StdioLogger(parent, config);
|
||||
return new ConsoleLogger(parent, config);
|
||||
}
|
||||
|
||||
var stub = require('./auto_release_stub').make();
|
||||
@ -25,4 +25,17 @@ describe('Console Logger', function () {
|
||||
|
||||
require('./generic_logger_tests')(makeLogger);
|
||||
|
||||
it('checks before using unique logging functions, falls back to #log()', function () {
|
||||
var _warning = console.warn;
|
||||
console.warn = null;
|
||||
stub(console, 'log');
|
||||
|
||||
var logger = makeLogger();
|
||||
|
||||
logger.onWarning('message');
|
||||
console.log.callCount.should.eql(1);
|
||||
|
||||
console.warn = _warning;
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
@ -210,7 +210,7 @@ describe('Http Connector', function () {
|
||||
});
|
||||
});
|
||||
|
||||
it('logs error events, and sets the connection to dead when an error occurs', function (done) {
|
||||
it('logs error events when an error occurs', function (done) {
|
||||
var con = new HttpConnection(new Host('http://google.com'));
|
||||
|
||||
stub(con.log, 'error');
|
||||
@ -233,14 +233,11 @@ describe('Http Connector', function () {
|
||||
con.log.warning.callCount.should.eql(0);
|
||||
con.log.debug.callCount.should.eql(0);
|
||||
|
||||
// set status to dead
|
||||
con.status.should.eql('dead');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('logs error events, and sets the connection to dead', function (done) {
|
||||
it('logs error events', function (done) {
|
||||
var con = new HttpConnection(new Host('http://google.com'));
|
||||
|
||||
stub(con.log, 'error');
|
||||
@ -255,9 +252,6 @@ describe('Http Connector', function () {
|
||||
con.log.error.callCount.should.eql(1);
|
||||
con.log.error.lastCall.args[0].message.should.eql('actual error');
|
||||
|
||||
// set status to dead
|
||||
con.status.should.eql('dead');
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
@ -279,16 +273,6 @@ describe('Http Connector', function () {
|
||||
});
|
||||
});
|
||||
|
||||
it('and sets the connection to dead', function (done) {
|
||||
var con = new HttpConnection(new Host('https://google.com'));
|
||||
stub(https, 'request', makeStubReqWithMsgWhichErrorsMidBody());
|
||||
|
||||
con.request({}, function (err, resp, status) {
|
||||
con.status.should.eql('dead');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('passes the original error on', function (done) {
|
||||
var con = new HttpConnection(new Host('https://google.com'));
|
||||
stub(https, 'request', makeStubReqWithMsgWhichErrorsMidBody(new Error('no more message :(')));
|
||||
@ -365,4 +349,37 @@ describe('Http Connector', function () {
|
||||
});
|
||||
});
|
||||
|
||||
describe('HTTP specifics', function () {
|
||||
it('uses TCP no delay', function (done) {
|
||||
var con = new HttpConnection(new Host('localhost'));
|
||||
stub(http.ClientRequest.prototype, 'setNoDelay');
|
||||
var server = nock('http://localhost').get('').reply(200);
|
||||
|
||||
con.request({}, function (err, resp, status) {
|
||||
http.ClientRequest.prototype.setNoDelay.callCount.should.eql(1);
|
||||
http.ClientRequest.prototype.setNoDelay.lastCall.args[0].should.eql(true);
|
||||
server.done();
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('sets the Content-Length header properly', function (done) {
|
||||
var con = new HttpConnection(new Host('localhost'));
|
||||
stub(http.ClientRequest.prototype, 'setHeader');
|
||||
var server = nock('http://localhost').get('').reply(200);
|
||||
|
||||
var body = 'pasta and 𝄞';
|
||||
body.length.should.eql(12); // nope
|
||||
Buffer.byteLength(body, 'utf8').should.eql(14); // yep
|
||||
|
||||
con.request({
|
||||
body: body
|
||||
}, function (err, resp, status) {
|
||||
http.ClientRequest.prototype.setHeader.lastCall.args.should.eql(['Content-Length', 14]);
|
||||
server.done();
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
@ -1,36 +1,43 @@
|
||||
var Transport = require('../../src/lib/transport');
|
||||
var Host = require('../../src/lib/host');
|
||||
var errors = require('../../src/lib/errors');
|
||||
var when = require('when');
|
||||
|
||||
var sinon = require('sinon');
|
||||
var nock = require('nock');
|
||||
var should = require('should');
|
||||
var _ = require('lodash');
|
||||
var nodeList = require('../fixtures/short_node_list.json');
|
||||
|
||||
var stub = require('./auto_release_stub').make();
|
||||
|
||||
nock.disableNetConnect();
|
||||
|
||||
/**
|
||||
* Allows the tests to call #request() without it doing anything past trying to select
|
||||
* a connection.
|
||||
* @param {Transport} tran - the transport to neuter
|
||||
*/
|
||||
function shortCircuitRequest(tran, delay) {
|
||||
stub(tran.connectionPool, 'select', function (cb) {
|
||||
setTimeout(function () {
|
||||
// call back with no error, and no connection === "NoConnections"
|
||||
cb();
|
||||
}, delay);
|
||||
});
|
||||
}
|
||||
|
||||
describe('Transport Class', function () {
|
||||
|
||||
describe('Constructor', function () {
|
||||
it('Accepts a log class and intanciates it at this.log', function () {
|
||||
function CustomLogClass() {}
|
||||
var trans = new Transport({
|
||||
logClass: CustomLogClass
|
||||
log: CustomLogClass
|
||||
});
|
||||
|
||||
trans.log.should.be.an.instanceOf(CustomLogClass);
|
||||
});
|
||||
|
||||
it('Accepts the name of a log class that is defined on Transport.logs', function () {
|
||||
Transport.logs.custom = function () {
|
||||
// custom logger class!
|
||||
};
|
||||
|
||||
var trans = new Transport({
|
||||
logClass: 'custom'
|
||||
});
|
||||
|
||||
trans.log.should.be.an.instanceOf(Transport.logs.custom);
|
||||
delete Transport.logs.custom;
|
||||
});
|
||||
|
||||
it('Accepts a "createDefer" function, which can be used to tie into other promise libs.', function () {
|
||||
function CustomPromise() {
|
||||
this.then = function () {};
|
||||
@ -65,18 +72,131 @@ describe('Transport Class', function () {
      delete Transport.connectionPools.custom;
    });

    it('Throws an error when the logClass or connectionPool configs are set wrong', function () {
    it('Throws an error when connectionPool config is set wrong', function () {
      (function () {
        var trans = new Transport({
          connectionPool: 'pasta'
        });
      }).should.throw(/invalid connectionpool/i);
    });

      (function () {
    describe('host config', function () {
      it('rejects non-strings/objects', function () {
        (function () {
          var trans = new Transport({
            host: [
              'localhost',
              9393
            ]
          });
        }).should.throw(TypeError);

        (function () {
          var trans = new Transport({
            host: [
              [9292]
            ]
          });
        }).should.throw(TypeError);
      });

      it('accepts the config value on the host: key', function () {
        stub(Transport.connectionPools.main.prototype, 'setHosts');
        var trans = new Transport({
          logClass: 'pasta'
          host: 'localhost'
        });
      }).should.throw(/invalid logclass/i);

        trans.connectionPool.setHosts.callCount.should.eql(1);
        trans.connectionPool.setHosts.lastCall.args[0].should.eql([
          new Host('localhost')
        ]);
      });

      it('accepts the config value on the hosts: key', function () {
        stub(Transport.connectionPools.main.prototype, 'setHosts');
        var trans = new Transport({
          hosts: 'localhost'
        });

        trans.connectionPool.setHosts.callCount.should.eql(1);
        trans.connectionPool.setHosts.lastCall.args[0].should.eql([
          new Host('localhost')
        ]);
      });

      it('accepts a Host object as the config', function () {
        stub(Transport.connectionPools.main.prototype, 'setHosts');
        var h = new Host('localhost');
        var trans = new Transport({
          host: h
        });

        trans.connectionPool.setHosts.callCount.should.eql(1);
        trans.connectionPool.setHosts.lastCall.args[0][0].should.be.exactly(h);
      });

      it('accepts strings as the config', function () {
        stub(Transport.connectionPools.main.prototype, 'setHosts');
        var trans = new Transport({
          hosts: [
            'localhost:8888',
          ]
        });

        trans.connectionPool.setHosts.callCount.should.eql(1);
        trans.connectionPool.setHosts.lastCall.args[0].should.eql([
          new Host({
            host: 'localhost',
            port: 8888
          })
        ]);
      });

      it('accepts objects as the config', function () {
        stub(Transport.connectionPools.main.prototype, 'setHosts');
        var trans = new Transport({
          hosts: [
            {
              protocol: 'https',
              host: 'myescluster.com',
              port: '777',
              path: '/bon/iver',
              query: {
                access: 'all'
              }
            }
          ]
        });

        trans.connectionPool.setHosts.callCount.should.eql(1);
        trans.connectionPool.setHosts.lastCall.args[0].should.eql([
          new Host('https://myescluster.com:777/bon/iver?access=all')
        ]);
      });
    });

    describe('randomizeHosts options', function () {
      it('calls _.shuffle by default', function () {
        var _ = require('../../src/lib/utils');
        stub(Transport.connectionPools.main.prototype, 'setHosts');
        stub(_, 'shuffle');
        var trans = new Transport({
          hosts: 'localhost'
        });

        _.shuffle.callCount.should.eql(1);
      });
      it('skips the call to _.shuffle when false', function () {
        var _ = require('../../src/lib/utils');
        stub(Transport.connectionPools.main.prototype, 'setHosts');
        stub(_, 'shuffle');
        var trans = new Transport({
          hosts: 'localhost',
          randomizeHosts: false
        });

        _.shuffle.callCount.should.eql(0);
      });
    });
  });

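The host config tests above all normalize to the same thing: whatever is passed on the host/hosts key ends up as Host objects handed to connectionPool.setHosts. A short usage sketch limited to the shapes these tests exercise (Transport and Host are the modules required at the top of this file):

// equivalent ways to describe where the cluster lives
new Transport({ host: 'localhost' });
new Transport({ hosts: ['localhost:8888'] });
new Transport({ host: new Host('localhost') });
new Transport({
  hosts: [{
    protocol: 'https',
    host: 'myescluster.com',
    port: '777',
    path: '/bon/iver',
    query: { access: 'all' }
  }]
});
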
@ -105,7 +225,6 @@ describe('Transport Class', function () {
        done();
      }, 5);
    });

    it('calls the nodesToHostCallback with the list of nodes', function (done) {
      trans.nodesToHostCallback = function (nodes) {
        nodes.should.eql(nodeList);
@ -114,7 +233,6 @@ describe('Transport Class', function () {
      };
      trans.sniff();
    });

    it('takes the host configs, converts them into Host objects, and passes them to connectionPool.setHosts',
    function (done) {
      trans.sniff(function () {
@ -133,7 +251,6 @@ describe('Transport Class', function () {
        done();
      });
    });

    it('passes back errors caught from the request', function (done) {
      trans.request.func = function (params, cb) {
        process.nextTick(function () {
@ -146,7 +263,6 @@ describe('Transport Class', function () {
        done();
      });
    });

    it('passes back the full server response', function (done) {
      trans.sniff(function (err, resp, status) {
        resp.should.include({
@ -164,4 +280,561 @@ describe('Transport Class', function () {
    });
  });

  describe('#createDefer', function () {
    it('returns a when.js promise by default', function () {
      var trans = new Transport({
        hosts: 'localhost'
      });

      trans.createDefer().constructor.should.be.exactly(when.defer().constructor);
    });
    it('is overridden by the createDefer option', function () {
      var when = require('when');
      var trans = new Transport({
        hosts: 'localhost',
        createDefer: function () {
          return 'pasta';
        }
      });

      trans.createDefer().should.be.exactly('pasta');
    });
  });

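As the #createDefer tests show, the transport hands out when.js deferreds unless a createDefer function is supplied. A sketch of plugging in another promise implementation by returning anything with the {promise, resolve, reject} shape, assuming a Promise constructor is available (native or from a library); this wrapper is an illustration, not something shipped in this commit:

var trans = new Transport({
  hosts: 'localhost',
  createDefer: function () {
    // build a minimal deferred from whatever Promise implementation is available
    var deferred = {};
    deferred.promise = new Promise(function (resolve, reject) {
      deferred.resolve = resolve;
      deferred.reject = reject;
    });
    return deferred;
  }
});
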
  describe('#request', function () {
    it('logs when it begins', function (done) {
      var trans = new Transport();
      stub(trans.log, 'debug');
      stub(trans.connectionPool, 'select', function (cb) {
        // simulate "no connections"
        process.nextTick(cb);
      });

      trans.request({}, function () {
        trans.log.debug.callCount.should.eql(1);
        done();
      });
    });
    it('rejects a body on a GET request', function (done) {
      var trans = new Transport();
      stub(trans.log, 'debug');
      stub(trans.connectionPool, 'select', function (cb) {
        // simulate "no connections"
        process.nextTick(cb);
      });

      trans.request({
        body: 'JSON!!',
        method: 'GET'
      }, function (err) {
        should.exist(err);
        err.should.be.an.instanceOf(TypeError);
        err.message.should.match(/body.*method.*get/i);
        done();
      });
    });

    describe('gets a body', function () {
      it('serializes it', function (done) {
        var trans = new Transport({
          hosts: 'localhost'
        });
        var conn = trans.connectionPool.getConnection();
        var body = {
          _id: 'simple body',
          name: 'ഢധയമബ'
        };

        stub(conn, 'request', function (params) {
          JSON.parse(params.body).should.eql(body);
          done();
        });

        trans.request({
          body: body
        });
      });
      it('serializes bulk bodies', function (done) {
        var trans = new Transport({
          hosts: 'localhost'
        });
        var conn = trans.connectionPool.getConnection();
        var body = [
          { _id: 'simple body'},
          { name: 'ഢധയമബ' }
        ];

        stub(conn, 'request', function (params) {
          params.body.should.eql(
            '{"_id":"simple body"}\n' +
            '{"name":"ഢധയമബ"}\n'
          );
          done();
        });

        trans.request({
          body: body,
          bulkBody: true
        });
      });
    });

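The bulk case above expects newline-delimited JSON with a trailing newline, one document per line, rather than a single JSON array. The format the assertion checks for, written out as a tiny helper (illustrative only, not the client's own serializer):

function toBulkBody(docs) {
  // one JSON document per line, trailing '\n' included
  return docs.map(function (doc) {
    return JSON.stringify(doc);
  }).join('\n') + '\n';
}

toBulkBody([{ _id: 'simple body' }, { name: 'ഢധയമബ' }]);
// => '{"_id":"simple body"}\n{"name":"ഢധയമബ"}\n'
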
    describe('gets a body it cannot serialize', function () {
      it('throws an error', function () {
        var trans = new Transport({
          hosts: 'localhost'
        });
        var conn = trans.connectionPool.getConnection();
        var body = {
          _id: 'circular body'
        };
        body.body = body;

        (function () {
          trans.request({
            body: body
          });
        }).should.throw(TypeError);
      });
    });

    describe('when selecting a connection', function () {
      it('logs a warning, and responds with NoConnection when it receives nothing', function (done) {
        var trans = new Transport();
        stub(trans.log, 'warning');
        trans.request({}, function (err, body, status) {
          trans.log.warning.callCount.should.eql(1);
          err.should.be.an.instanceOf(errors.NoConnections);
          should.not.exist(body);
          should.not.exist(status);
          done();
        });
      });
      it('quits if a sync selector throws an error', function () {
        var trans = new Transport({
          hosts: 'localhost',
          selector: function () {
            throw new Error('I am broken');
          }
        });

        trans.request({}, function (err, body, status) {
          err.message.should.eql('I am broken');
        });
      });
      it('quits if it gets an error from an async selector', function () {
        var trans = new Transport({
          hosts: 'localhost',
          selector: function (connections, cb) {
            process.nextTick(function () {
              cb(new Error('I am broken'));
            });
          }
        });

        trans.request({}, function (err, body, status) {
          err.message.should.eql('I am broken');
        });
      });
      it('calls connection#request once it gets one', function (done) {
        var trans = new Transport({
          hosts: 'localhost'
        });
        var conn = trans.connectionPool.getConnection();

        stub(conn, 'request', function () {
          done();
        });

        trans.request({}, function () {});
      });
    });

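The selector tests above cover both calling conventions: a synchronous selector returns a connection, an asynchronous one receives (connections, cb). A sketch of each shape; the round-robin choice and the cb(null, connection) success signature follow the usual Node convention and are assumptions here, since only the error path cb(err) is asserted above:

// synchronous: return one of the connections
var i = 0;
function roundRobin(connections) {
  return connections[i++ % connections.length];
}

// asynchronous: call back with (err, connection)
function asyncSelector(connections, cb) {
  process.nextTick(function () {
    cb(null, connections[0]);
  });
}

new Transport({ hosts: 'localhost', selector: roundRobin });
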
    describe('gets a connection err', function () {
      function testRetries(retries, done) {
        var randomSelector = require('../../src/lib/selectors/random');
        var connections;
        var attempts = 0;
        function failRequest(params, cb) {
          attempts++;
          process.nextTick(function () {
            cb(new Error('Unable to do that thing you wanted'));
          });
        }

        var trans = new Transport({
          hosts: _.map(new Array(retries + 1), function (i) {
            return 'localhost/' + i;
          }),
          maxRetries: retries,
          selector: function (_conns) {
            connections = _conns;
            return randomSelector(_conns);
          }
        });

        // trigger a select so that we can harvest the connection list
        trans.connectionPool.select(_.noop);
        _.each(connections, function (conn) {
          stub(conn, 'request', failRequest);
        });

        trans.request({}, function (err, resp, body) {
          attempts.should.eql(retries + 1);
          err.should.be.an.instanceOf(errors.ConnectionFault);
          should.not.exist(resp);
          should.not.exist(body);
          done();
        });
      }
      it('retries when there are retries remaining', function (done) {
        testRetries(30, done);
      });
      it('responds when there are no retries', function (done) {
        testRetries(0, done);
      });
    });

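testRetries above pins down the retry contract: with every connection failing, a request is attempted maxRetries + 1 times and then the callback receives errors.ConnectionFault. A minimal configuration sketch using only options that appear in these tests:

// three hosts, two retries: a request that keeps hitting connection errors
// is attempted 3 times before failing with errors.ConnectionFault
var trans = new Transport({
  hosts: ['localhost/0', 'localhost/1', 'localhost/2'],
  maxRetries: 2
});

trans.request({ path: '/' }, function (err, body, status) {
  if (err instanceof errors.ConnectionFault) {
    // every attempt hit a dead or unreachable connection
  }
});
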
    describe('server responds', function () {
      var serverMock;

      before(function () {
        serverMock = nock('http://localhost')
          .get('/give-me-400')
          .reply(400, 'sorry bub')

          .get('/give-me-404')
          .times(2)
          .reply(404, 'nothing here')

          .get('/give-me-500')
          .reply(500, 'ah shit')

          .get('/exists?')
          .reply(200, '{"status":200}')

          .get('/give-me-someth')
          .reply(200, '{"not":"valid')

          .get('/')
          .reply(200, '{"the answer":42}')

          .get('/huh?')
          .reply(530, 'boo');
      });

      after(function () {
        serverMock.done();
      });

      describe('with a 400 status code', function () {
        it('passes back a 400/BadRequest error', function (done) {
          var trans = new Transport({
            hosts: 'localhost'
          });

          trans.request({
            path: '/give-me-400'
          }, function (err, body, status) {
            err.should.be.an.instanceOf(errors[400]);
            err.should.be.an.instanceOf(errors.BadRequest);
            should.not.exist(body);
            should.not.exist(status);
            done();
          });
        });
      });

      describe('with a 404 status code', function () {
        describe('and castExists is set', function () {
          it('sends back false', function (done) {
            var trans = new Transport({
              hosts: 'localhost'
            });

            trans.request({
              path: '/give-me-404',
              castExists: true
            }, function (err, body, status) {
              should.not.exist(err);
              body.should.eql(false);
              status.should.eql(404);
              done();
            });
          });
        });
        describe('and the castExists param is not set', function () {
          it('sends back a 404/NotFound error', function (done) {
            var trans = new Transport({
              hosts: 'localhost'
            });

            trans.request({
              path: '/give-me-404'
            }, function (err, body, status) {
              err.should.be.an.instanceOf(errors[404]);
              err.should.be.an.instanceOf(errors.NotFound);
              should.not.exist(body);
              should.not.exist(status);
              done();
            });
          });
        });
      });

      describe('with a 500 status code', function () {
        it('passes back a 500/InternalServerError error', function (done) {
          var trans = new Transport({
            hosts: 'localhost'
          });

          trans.request({
            path: '/give-me-500'
          }, function (err, body, status) {
            err.should.be.an.instanceOf(errors[500]);
            err.should.be.an.instanceOf(errors.InternalServerError);
            should.not.exist(body);
            should.not.exist(status);
            done();
          });
        });
      });

      describe('with a 530 status code', function () {
        it('passes back a Generic error', function (done) {
          var trans = new Transport({
            hosts: 'localhost'
          });

          trans.request({
            path: '/huh?'
          }, function (err, body, status) {
            err.should.be.an.instanceOf(errors.Generic);
            should.not.exist(body);
            should.not.exist(status);
            done();
          });
        });
      });

      describe('with a 200 status code', function () {
        describe('and the castExists param is set', function () {
          it('sends back true', function (done) {
            var trans = new Transport({
              hosts: 'localhost'
            });

            trans.request({
              path: '/exists?',
              castExists: true
            }, function (err, body, status) {
              should.not.exist(err);
              body.should.eql(true);
              status.should.eql(200);
              done();
            });
          });
        });
        describe('with a partial response body', function () {
          it('sends back a serialization error', function (done) {
            var trans = new Transport({
              hosts: 'localhost'
            });

            trans.request({
              path: '/give-me-someth',
            }, function (err, body, status) {
              err.should.be.an.instanceOf(errors.Serialization);
              should.not.exist(body);
              should.not.exist(status);
              done();
            });
          });
        });
        describe('with a valid response body', function () {
          it('sends back the body and status code with no error', function (done) {
            var trans = new Transport({
              hosts: 'localhost'
            });

            trans.request({
              path: '/',
            }, function (err, body, status) {
              should.not.exist(err);
              body.should.eql({
                'the answer': 42
              });
              done();
            });
          });
        });
      });
    });

    describe('return value', function () {
      it('returns an object with an abort() method when a callback is sent', function () {
        var tran = new Transport();
        shortCircuitRequest(tran);
        var ret = tran.request({}, _.noop);
        ret.should.have.type('object');
        ret.abort.should.have.type('function');
      });
      it('the object is a promise when a callback is not supplied', function () {
        var tran = new Transport();
        shortCircuitRequest(tran);
        var ret = tran.request({});
        when.isPromise(ret).should.be.ok;
        ret.abort.should.have.type('function');
      });
      it('the promise is always pulled from the defer created by this.createDefer()', function () {
        var fakePromise = {};
        var tran = new Transport({
          createDefer: function () {
            return {
              resolve: _.noop,
              reject: _.noop,
              promise: fakePromise
            };
          }
        });
        shortCircuitRequest(tran);
        var ret = tran.request({});
        ret.should.be.exactly(fakePromise);
        ret.abort.should.have.type('function');
      });
    });

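Per the return value tests above, request() hands back a plain object exposing abort() when a callback is supplied, and otherwise the promise produced by createDefer() (a when.js promise by default) with an abort method attached. A usage sketch; trans is assumed to be a Transport configured as in these tests and the paths are placeholders:

// callback style: the return value exists mainly so the request can be aborted
var req = trans.request({ path: '/' }, function (err, body, status) { /* ... */ });
req.abort();

// promise style: no callback, consume the promise instead; abort() is attached to it
var promise = trans.request({ path: '/' });
promise.abort();
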
    describe('aborting', function () {
      it('prevents the request from starting if called in the same tick', function () {
        var tran = new Transport({
          host: 'localhost'
        });

        var con = tran.connectionPool.getConnection();
        stub(con, 'request', function () {
          throw new Error('Request should not have been called.');
        });

        var ret = tran.request({});
        ret.abort();
      });
      it('calls the function returned by the connector if it has been called', function (done) {
        var tran = new Transport({
          host: 'localhost'
        });

        var con = tran.connectionPool.getConnection();
        stub(con, 'request', function () {
          process.nextTick(function () {
            ret.abort();
          });
          return function () {
            done();
          };
        });

        var ret = tran.request({});
      });
      it('ignores the response from the connection when the connector does not support aborting', function (done) {
        var tran = new Transport({
          host: 'localhost'
        });

        var con = tran.connectionPool.getConnection();
        stub(con, 'request', function (params, cb) {
          cb();
        });

        var ret = tran.request({}, function () {
          throw new Error('Callback should not have been called.');
        });
        ret.abort();
        setTimeout(done, 1);
      });
    });

    describe('timeout', function () {
      it('uses 10 seconds for the default', function (done) {
        this.timeout(5);
        var clock;
        stub.autoRelease(clock = sinon.useFakeTimers('setTimeout'));

        var tran = new Transport({});

        stub(tran.connectionPool, 'select', function (cb) {
          setTimeout(cb, 11000);
        });

        tran.request({}, function (err) {
          err.should.be.an.instanceOf(errors.RequestTimeout);
          done();
        });

        clock.tick(10010);
      });
      it('inherits the requestTimeout from the transport', function (done) {
        this.timeout(5);
        var clock;
        stub.autoRelease(clock = sinon.useFakeTimers('setTimeout'));

        var tran = new Transport({
          requestTimeout: 5000
        });

        stub(tran.connectionPool, 'select', function (cb) {
          setTimeout(cb, 11000);
        });

        tran.request({}, function (err) {
          err.should.be.an.instanceOf(errors.RequestTimeout);
          done();
        });

        clock.tick(6000);
      });
      [false, 0, null].forEach(function (falsy) {
        it('skips the timeout when it is ' + falsy, function (done) {
          this.timeout(5);
          var clock;
          stub.autoRelease(clock = sinon.useFakeTimers('setTimeout'));

          var tran = new Transport({
            requestTimeout: 5000
          });

          stub(tran.connectionPool, 'select', function (cb) {
            setTimeout(function () {
              cb(new Error('it works'));
            }, 10000);
          });

          tran.request({
            requestTimeout: falsy
          }, function (err) {
            err.message.should.eql('it works');
            done();
          });

          clock.tick(6000);
          process.nextTick(function () {
            clock.tick(6000);
          });
        });
      });
    });
  });

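The timeout tests above establish the behavior: 10 seconds by default, a transport-level requestTimeout overrides that, and a falsy per-request requestTimeout (false, 0, or null) skips the timer entirely. A configuration sketch along those lines (paths are placeholders):

var tran = new Transport({
  hosts: 'localhost',
  requestTimeout: 5000
});

// fails with errors.RequestTimeout if nothing comes back within 5 seconds
tran.request({ path: '/slow' }, function (err) { /* ... */ });

// a falsy requestTimeout disables the timer for this request only
tran.request({ path: '/slower', requestTimeout: false }, function (err) { /* ... */ });
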
  describe('#close', function () {
    it('proxies the call to its log and connection pool', function () {
      var tran = new Transport();
      stub(tran.connectionPool, 'close');
      stub(tran.log, 'close');

      tran.close();

      tran.connectionPool.close.callCount.should.eql(1);
      tran.log.close.callCount.should.eql(1);
    });
  });

});