use eslint autofix to start fixing violations

spalger
2017-06-14 18:48:24 -07:00
parent da8e558817
commit 3d75c6ff0f
97 changed files with 1281 additions and 1280 deletions
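
Note on how these edits were produced: the commit applies ESLint's autofixer rather than hand edits. The repository's actual ESLint configuration is not part of this diff, but a minimal rule set along the following lines, run through eslint --fix, would produce exactly the classes of change shown below (var rewritten to const or let, and missing trailing semicolons added). Treat it as an illustrative sketch, not the project's real config.

    // .eslintrc (illustrative only -- the real config likely enables many more rules)
    {
      "rules": {
        "no-var": "error",           // rewrite var declarations to let/const
        "prefer-const": "error",     // promote let to const when never reassigned
        "semi": ["error", "always"]  // require trailing semicolons
      }
    }

    # run the autofixer over the repository
    ./node_modules/.bin/eslint --fix .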

View File

@ -1,16 +1,16 @@
var _ = require('lodash');
var pkg = require('../package.json');
var branches = [...pkg.config.supported_es_branches, ...pkg.config.unstable_es_branches];
var semver = require('semver');
const _ = require('lodash');
const pkg = require('../package.json');
const branches = [...pkg.config.supported_es_branches, ...pkg.config.unstable_es_branches];
const semver = require('semver');
function nextMajorVersion() {
const largestMajor = branches
.map(v => parseFloat(v.split('.')[0]))
.filter(n => !isNaN(n))
.sort((a, b) => b - a)
.shift()
.shift();
return new Version(`${largestMajor + 1}.0.0`)
return new Version(`${largestMajor + 1}.0.0`);
}
function nextMinorVersion(major) {
@ -42,11 +42,11 @@ Version.fromBranch = function (branch) {
if (/^\d+\.\d+$/.test(branch)) return new Version(branch + '.0');
// n.x -> n.(maxVersion + 1).0
const match = branch.match(/^(\d+)\.x$/i)
const match = branch.match(/^(\d+)\.x$/i);
if (match) return nextMinorVersion(match[1]);
// master => (maxMajorVersion + 1).0.0
if (branch === 'master') return nextMajorVersion()
if (branch === 'master') return nextMajorVersion();
throw new Error('unable to convert branch "' + branch + '" to semver');
};
@ -66,9 +66,9 @@ Version.prototype.mergeOpts = function (versioned, overrides) {
const candidates = versioned
.filter(o => this.satisfies(o.version))
.map(o => _.omit(o, 'version'))
.map(o => _.omit(o, 'version'));
return _.merge({}, overrides || {}, ...candidates)
return _.merge({}, overrides || {}, ...candidates);
};
module.exports = Version;

View File

@ -1,11 +1,11 @@
module.exports = _spawn;
var map = require('through2-map');
var split = require('split');
var chalk = require('chalk');
var spawn = require('child_process').spawn;
var path = require('path');
var root = path.resolve(__dirname, '../');
const map = require('through2-map');
const split = require('split');
const chalk = require('chalk');
const spawn = require('child_process').spawn;
const path = require('path');
const root = path.resolve(__dirname, '../');
function indent(line) {
line = String(line).trim();
@ -26,7 +26,7 @@ function _spawn(cmd, args, opts, cb) {
opts.verbose = false;
}
var conf = {
const conf = {
stdio: [
'ignore',
opts.verbose ? 'pipe' : 'ignore',
@ -34,7 +34,7 @@ function _spawn(cmd, args, opts, cb) {
]
};
var subdir;
let subdir;
if (opts.cwd) {
conf.cwd = opts.cwd;
@ -43,7 +43,7 @@ function _spawn(cmd, args, opts, cb) {
console.log(chalk.white.bold((subdir ? subdir + ' ' : '') + '$ ') + cmd + ' ' + args.join(' '));
var cp = spawn(cmd, args, conf);
const cp = spawn(cmd, args, conf);
if (opts.verbose) {
consume(cp.stdout);

View File

@ -17,25 +17,25 @@
*
*******/
var Promise = require('bluebird');
var _ = require('lodash');
var through2 = require('through2');
var map = require('through2-map');
var split = require('split');
var join = require('path').join;
var cp = require('child_process');
var chalk = require('chalk');
var format = require('util').format;
const Promise = require('bluebird');
const _ = require('lodash');
const through2 = require('through2');
const map = require('through2-map');
const split = require('split');
const join = require('path').join;
const cp = require('child_process');
const chalk = require('chalk');
const format = require('util').format;
var NL_RE = /(\r?\n)/g;
var ROOT = join(__dirname, '..');
var GRUNT = join(ROOT, 'node_modules', '.bin', 'grunt');
var ENV = _.clone(process.env);
var JENKINS = !!ENV.JENKINS_HOME;
var TASKS = [];
const NL_RE = /(\r?\n)/g;
const ROOT = join(__dirname, '..');
const GRUNT = join(ROOT, 'node_modules', '.bin', 'grunt');
const ENV = _.clone(process.env);
const JENKINS = !!ENV.JENKINS_HOME;
const TASKS = [];
var output; // main output stream
var taskOut; // task output stream
let output; // main output stream
let taskOut; // task output stream
task('NODE_UNIT', true, function () {
if (!JENKINS) {
@ -46,11 +46,11 @@ task('NODE_UNIT', true, function () {
});
task('NODE_INTEGRATION', true, function () {
var branch = ENV.ES_REF;
const branch = ENV.ES_REF;
return node('scripts/generate', '--no-api', '--branch', branch)
.then(function () {
var target = (JENKINS ? 'jenkins_' : '') + 'integration:' + branch;
const target = (JENKINS ? 'jenkins_' : '') + 'integration:' + branch;
return grunt('esvm:ci_env', 'mocha_' + target, 'esvm_shutdown:ci_env');
});
});
@ -58,9 +58,9 @@ task('NODE_INTEGRATION', true, function () {
task('SAUCE_LABS', false, function () {
return new Promise(function (resolve, reject) {
// build the clients and start the server, once the server is ready call trySaucelabs()
var serverTasks = ['browser_clients:build', 'run:browser_test_server:keepalive'];
const serverTasks = ['browser_clients:build', 'run:browser_test_server:keepalive'];
spawn(GRUNT, serverTasks, function (proc) {
var toLines = split();
const toLines = split();
proc.stdout
.pipe(toLines)
@ -83,14 +83,14 @@ task('SAUCE_LABS', false, function () {
.catch(_.noop);
// attempt to run tests on saucelabs and retry if it fails
var saucelabsAttempts = 0;
let saucelabsAttempts = 0;
function trySaucelabs() {
saucelabsAttempts++;
return new Promise(function (resolve, reject) {
log(chalk.green('saucelabs attempt #', saucelabsAttempts));
spawn(GRUNT, ['saucelabs-mocha'], function (cp) {
var failedTests = 0;
let failedTests = 0;
cp.stdout
.pipe(split())
.pipe(map(function (line) {
@ -141,7 +141,7 @@ execTask('SETUP', function () {
}
}
var match;
let match;
if (match = ENV.ES_V.match(/^(.*)_nightly$/)) {
return [match[1], null];
}
@ -206,7 +206,7 @@ execTask('SETUP', function () {
* utils
******/
function log() {
var chunk = format.apply(null, arguments);
const chunk = format.apply(null, arguments);
(taskOut || output || process.stdout).write(chunk + '\n');
}
@ -218,15 +218,15 @@ function logImportant(text) {
function push(m) {
return function () {
var args = _.toArray(arguments);
var cb = args.pop();
const args = _.toArray(arguments);
const cb = args.pop();
this.push(m.apply(this, args));
cb();
};
}
function indent() {
var str = through2(
const str = through2(
push(function (chunk) { return String(chunk).replace(NL_RE, '$1 '); }),
push(function () { return '\n'; })
);
@ -281,7 +281,7 @@ function execTask(name, task) {
function spawn(file, args, block) {
return new Promise(function (resolve, reject) {
var proc = cp.spawn(file, args, {
const proc = cp.spawn(file, args, {
cwd: ROOT,
env: ENV,
stdio: [0, 'pipe', 'pipe']
@ -290,7 +290,7 @@ function spawn(file, args, block) {
proc.stdout.pipe(taskOut, { end: false });
proc.stderr.pipe(taskOut, { end: false });
var stdout = '';
let stdout = '';
proc.stdout
.pipe(through2(function (chunk, enc, cb) {
stdout += chunk;
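
A detail visible in the hunk above: the autofixer only promotes a binding to const when it is never reassigned. Condensed from the lines shown here (an illustrative sketch, not the exact file contents), proc is assigned once and becomes const, while stdout is grown with += and therefore only becomes let:

    // before the autofix
    var proc = cp.spawn(file, args, conf);
    var stdout = '';
    proc.stdout.on('data', function (chunk) { stdout += chunk; });

    // after eslint --fix: the single-assignment binding becomes const,
    // the reassigned accumulator becomes let
    const proc = cp.spawn(file, args, conf);
    let stdout = '';
    proc.stdout.on('data', function (chunk) { stdout += chunk; });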

View File

@ -1,13 +1,13 @@
module.exports = function (done) {
var _ = require('../../src/lib/utils');
var utils = require('../../grunt/utils');
const _ = require('../../src/lib/utils');
const utils = require('../../grunt/utils');
var chalk = require('chalk');
var fromRoot = _.partial(require('path').join, require('find-root')(__dirname));
var write = require('fs').writeFileSync;
const chalk = require('chalk');
const fromRoot = _.partial(require('path').join, require('find-root')(__dirname));
const write = require('fs').writeFileSync;
var nodeApiIndex = fromRoot('src/lib/apis/index.js');
var browserApiIndex = fromRoot('src/lib/apis/browser_index.js');
const nodeApiIndex = fromRoot('src/lib/apis/index.js');
const browserApiIndex = fromRoot('src/lib/apis/browser_index.js');
write(nodeApiIndex, require('./templates').apiIndex({
branches: utils.branches

View File

@ -1,11 +1,11 @@
module.exports = function (done) {
var _ = require('../../src/lib/utils');
const _ = require('../../src/lib/utils');
var chalk = require('chalk');
var fromRoot = _.partial(require('path').join, require('find-root')(__dirname));
var write = require('fs').writeFile;
const chalk = require('chalk');
const fromRoot = _.partial(require('path').join, require('find-root')(__dirname));
const write = require('fs').writeFile;
var outputPath = fromRoot('docs/configuration.asciidoc');
const outputPath = fromRoot('docs/configuration.asciidoc');
write(outputPath, require('./templates').configurationDocs(), 'utf8', done);
console.log(chalk.white.bold('wrote'), 'configuration docs to', outputPath);
};

View File

@ -1,12 +1,12 @@
module.exports = function (done) {
var _ = require('../../src/lib/utils');
var utils = require('../../grunt/utils');
const _ = require('../../src/lib/utils');
const utils = require('../../grunt/utils');
var chalk = require('chalk');
var fromRoot = _.partial(require('path').join, require('find-root')(__dirname));
var write = require('fs').writeFile;
const chalk = require('chalk');
const fromRoot = _.partial(require('path').join, require('find-root')(__dirname));
const write = require('fs').writeFile;
var outputPath = fromRoot('docs/index.asciidoc');
const outputPath = fromRoot('docs/index.asciidoc');
write(outputPath, require('./templates').docsIndex({
apiFiles: utils.stableBranches.map(function (branch) {

View File

@ -1,8 +1,8 @@
/* jshint curly: false */
var async = require('async');
var fs = require('fs');
var spawn = require('../_spawn');
var argv = require('optimist')
const async = require('async');
const fs = require('fs');
const spawn = require('../_spawn');
let argv = require('optimist')
.options({
verbose: {
alias: 'v',
@ -27,15 +27,15 @@ var argv = require('optimist')
}
});
var path = require('path');
var fromRoot = path.join.bind(path, require('find-root')(__dirname));
var utils = require(fromRoot('grunt/utils'));
var _ = require(fromRoot('src/lib/utils'));
var esUrl = process.env.ES_REPO
const path = require('path');
const fromRoot = path.join.bind(path, require('find-root')(__dirname));
const utils = require(fromRoot('grunt/utils'));
const _ = require(fromRoot('src/lib/utils'));
const esUrl = process.env.ES_REPO
? path.resolve(process.cwd(), process.env.ES_REPO)
: 'https://github.com/elastic/elasticsearch.git';
var branches;
let branches;
if (process.env.npm_config_argv) {
// when called by NPM
@ -51,7 +51,7 @@ if (argv.branch) {
branches = utils.branches;
}
var paths = {
const paths = {
root: fromRoot('.'),
src: fromRoot('src'),
esSrc: fromRoot('src/_elasticsearch_'),
@ -70,7 +70,7 @@ var paths = {
};
function isDirectory(dir) {
var stat;
let stat;
try { stat = fs.statSync(dir); } catch (e) {}
return (stat && stat.isDirectory());
}
@ -129,15 +129,15 @@ function initStep() {
}
function fetchBranchesStep() {
var branchArgs = branches.map(function (b) { return b + ':' + b; });
const branchArgs = branches.map(function (b) { return b + ':' + b; });
return spawnStep('git', ['fetch', '--no-tags', '--force', 'origin'].concat(branchArgs), paths.esSrc);
}
function findGeneratedApiFiles() {
var anyApiMethodDocs = /^(configuration|index|api_methods).*\.asciidoc$/;
var anyApiJsFiled = /^.+\.js$/;
var allBranches = _.isEqual(branches, utils.branches);
const anyApiMethodDocs = /^(configuration|index|api_methods).*\.asciidoc$/;
const anyApiJsFiled = /^.+\.js$/;
const allBranches = _.isEqual(branches, utils.branches);
if (allBranches) {
return [
@ -147,11 +147,11 @@ function findGeneratedApiFiles() {
}
return branches.reduce(function (files, branch) {
var b = _.snakeCase(branch);
const b = _.snakeCase(branch);
files.push(dirOpts(paths.docs, 'api_methods_' + b + '.asciidoc'));
var isDefault = branch === utils.branches._default;
const isDefault = branch === utils.branches._default;
if (isDefault) {
files.push(dirOpts(paths.docs, 'api_methods.asciidoc'));
}
@ -164,8 +164,8 @@ function findGeneratedApiFiles() {
function clearGeneratedFiles() {
var esArchives = /^_elasticsearch_(master|[\dx_]+|\.tar)$/;
var generatedFiles = [];
const esArchives = /^_elasticsearch_(master|[\dx_]+|\.tar)$/;
const generatedFiles = [];
if (argv.api) {
generatedFiles.push(findGeneratedApiFiles());
@ -173,7 +173,7 @@ function clearGeneratedFiles() {
generatedFiles.push(dirRegex(paths.src, esArchives));
var rmSteps = _.chain(generatedFiles)
const rmSteps = _.chain(generatedFiles)
.flatten()
.uniq()
.map(function (path) {
@ -191,7 +191,7 @@ function clearGeneratedFiles() {
function removePrevArchive(branch) {
if (!argv.update) return;
var dir = paths.getArchiveDir(branch);
const dir = paths.getArchiveDir(branch);
if (!isDirectory(dir)) return;
return spawnStep('rm', ['-rf', dir], paths.root);
@ -199,10 +199,10 @@ function removePrevArchive(branch) {
function createArchive(branch) {
return function (done) {
var dir = paths.getArchiveDir(branch);
var tarball = paths.getArchiveTarball(branch);
var specPathInRepo = paths.getSpecPathInRepo(branch);
var subDirCount = _.countBy(specPathInRepo, p => p === '/').true || 0;
const dir = paths.getArchiveDir(branch);
const tarball = paths.getArchiveTarball(branch);
const specPathInRepo = paths.getSpecPathInRepo(branch);
const subDirCount = _.countBy(specPathInRepo, p => p === '/').true || 0;
if (isDirectory(dir)) {
console.log(branch + ' archive already exists');
@ -227,7 +227,7 @@ function generateStep(branch) {
};
}
var steps = [
const steps = [
initStep(),
clearGeneratedFiles(),
fetchBranchesStep()

View File

@ -3,26 +3,26 @@ module.exports = function (branch, done) {
* Read the API actions form the rest-api-spec repo.
* @type {[type]}
*/
var _ = require('../../src/lib/utils');
var utils = require('../../grunt/utils');
var fs = require('fs');
var async = require('async');
var chalk = require('chalk');
var path = require('path');
var fromRoot = path.join.bind(path, require('find-root')(__dirname));
var templates = require('./templates');
var Version = require('../Version');
var urlParamRE = /\{(\w+)\}/g;
const _ = require('../../src/lib/utils');
const utils = require('../../grunt/utils');
const fs = require('fs');
const async = require('async');
const chalk = require('chalk');
const path = require('path');
const fromRoot = path.join.bind(path, require('find-root')(__dirname));
const templates = require('./templates');
const Version = require('../Version');
const urlParamRE = /\{(\w+)\}/g;
var files; // populated in readSpecFiles
var apiSpec; // populated by parseSpecFiles
var docVars; // slightly modified clone of apiSpec for the docs
let files; // populated in readSpecFiles
let apiSpec; // populated by parseSpecFiles
let docVars; // slightly modified clone of apiSpec for the docs
var branchSuffix = utils.branchSuffix(branch);
var esDir = fromRoot('src/_elasticsearch_' + _.snakeCase(branch));
const branchSuffix = utils.branchSuffix(branch);
const esDir = fromRoot('src/_elasticsearch_' + _.snakeCase(branch));
var version = Version.fromBranch(branch);
var overrides = version.mergeOpts(require('./overrides'), {
const version = Version.fromBranch(branch);
const overrides = version.mergeOpts(require('./overrides'), {
aliases: {},
mergeConcatParams: {},
paramAsBody: {},
@ -31,7 +31,7 @@ module.exports = function (branch, done) {
descriptions: {},
});
var steps = [
const steps = [
readSpecFiles,
parseSpecFiles,
writeApiFile
@ -51,13 +51,13 @@ module.exports = function (branch, done) {
});
function readSpecFiles(done) {
var apiDir = path.join(esDir, 'rest-api-spec/api/');
const apiDir = path.join(esDir, 'rest-api-spec/api/');
files = fs.readdirSync(apiDir)
.filter(function (filename) {
return filename[0] !== '_'
return filename[0] !== '_';
})
.map(function (filename) {
var module = require(apiDir + filename);
const module = require(apiDir + filename);
delete require.cache[apiDir + filename];
return module;
});
@ -65,7 +65,7 @@ module.exports = function (branch, done) {
}
function parseSpecFiles(done) {
var actions = [];
const actions = [];
files.forEach(function (spec) {
__puke__transformSpec(spec).forEach(function (action) {
@ -74,16 +74,16 @@ module.exports = function (branch, done) {
});
// collect the namespaces from the action locations
var namespaces = _.filter(_.map(actions, function (action) {
const namespaces = _.filter(_.map(actions, function (action) {
return action.location
.split('.')
.slice(0, -1)
.filter(step => step !== 'prototype')
.join('.prototype.')
.join('.prototype.');
}));
// seperate the proxy actions
var groups = _.groupBy(actions, function (action) {
const groups = _.groupBy(actions, function (action) {
return action.proxy ? 'proxies' : 'normal';
});
@ -95,7 +95,7 @@ module.exports = function (branch, done) {
};
if (!_.find(apiSpec.actions, { name: 'create' })) {
var create = _.assign(
const create = _.assign(
{},
_.cloneDeep(_.find(apiSpec.actions, { name: 'index' })),
{
@ -115,8 +115,8 @@ module.exports = function (branch, done) {
[].concat(apiSpec.actions, apiSpec.proxies)
.forEach(function (action) {
var examplePath = overrides.examples[action.name] || action.name + '.asciidoc';
var descriptionPath = overrides.descriptions[action.name] || action.name + '.asciidoc';
const examplePath = overrides.examples[action.name] || action.name + '.asciidoc';
const descriptionPath = overrides.descriptions[action.name] || action.name + '.asciidoc';
try {
action.examples = fs.readFileSync(fromRoot('docs/_examples', examplePath), 'utf8');
@ -129,13 +129,13 @@ module.exports = function (branch, done) {
} catch (e) {
action.description = '// no description';
}
})
});
done();
}
function writeApiFile(done) {
var outputPath = fromRoot('src/lib/apis/' + _.snakeCase(branch) + '.js');
const outputPath = fromRoot('src/lib/apis/' + _.snakeCase(branch) + '.js');
fs.writeFileSync(outputPath, templates.apiFile(apiSpec));
console.log(chalk.white.bold('wrote'), apiSpec.actions.length, 'api actions to', outputPath);
done();
@ -173,7 +173,7 @@ module.exports = function (branch, done) {
}
function writeMethodDocs(done) {
var filename = fromRoot('docs/api_methods' + branchSuffix + '.asciidoc');
const filename = fromRoot('docs/api_methods' + branchSuffix + '.asciidoc');
fs.writeFile(
filename,
templates.apiMethods(docVars),
@ -187,7 +187,7 @@ module.exports = function (branch, done) {
}
function __puke__transformSpec(spec) { // eslint-disable-line
var actions = [];
const actions = [];
// itterate all of the specs within the file, should only be one
_.each(spec, function (def, name) {
@ -198,10 +198,10 @@ module.exports = function (branch, done) {
def.documentation = 'http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/cat.html';
}
var steps = name.split('.');
const steps = name.split('.');
function transformParamKeys(note, param, key) {
var cmlKey = _.camelCase(key);
const cmlKey = _.camelCase(key);
if (cmlKey !== key) {
param.name = key;
}
@ -211,24 +211,24 @@ module.exports = function (branch, done) {
def.url.params = _.transform(def.url.params, transformParamKeys, {});
def.url.parts = _.transform(def.url.parts, transformParamKeys, {});
var allParams = _.extend({}, def.url.params, def.url.parts);
const allParams = _.extend({}, def.url.params, def.url.parts);
_.forOwn(allParams, (paramSpec, paramName) => {
const toMerge = _.get(overrides, ['mergeConcatParams', name, paramName])
const toMerge = _.get(overrides, ['mergeConcatParams', name, paramName]);
if (toMerge) {
_.merge(paramSpec, toMerge, (dest, src) => {
if (_.isArray(dest) && _.isArray(src)) {
return dest.concat(src)
return dest.concat(src);
}
})
});
}
if (paramSpec.options) {
const invalidOpts = paramSpec.options.some(opt => typeof opt !== 'string')
if (invalidOpts) throw new Error(`${name} has options that are not strings...`)
const invalidOpts = paramSpec.options.some(opt => typeof opt !== 'string');
if (invalidOpts) throw new Error(`${name} has options that are not strings...`);
}
})
});
var spec = {
const spec = {
name: name,
methods: _.map(def.methods, function (m) { return m.toUpperCase(); }),
params: def.url.params,
@ -248,15 +248,15 @@ module.exports = function (branch, done) {
spec.requestTimeout = 3000;
}
var urls = _.difference(def.url.paths, overrides.aliases[name]);
var urlSignatures = [];
let urls = _.difference(def.url.paths, overrides.aliases[name]);
const urlSignatures = [];
urls = _.map(urls, function (url) {
var optionalVars = {};
var requiredVars = {};
var param;
var name;
var target;
var match;
const optionalVars = {};
const requiredVars = {};
let param;
let name;
let target;
let match;
if (url.charAt(0) !== '/') {
url = '/' + url;
@ -317,10 +317,10 @@ module.exports = function (branch, done) {
}
// escape method names with "special" keywords
var location = spec.name.split('.').join('.prototype.')
const location = spec.name.split('.').join('.prototype.')
.replace(/(^|\.)(delete|default)(\.|$)/g, '[\'$2\']');
var action = {
const action = {
_methods: spec.methods,
spec: _.pick(spec, [
'params',
@ -339,7 +339,7 @@ module.exports = function (branch, done) {
};
function hasMethod(/* ...methods */) {
for (var i = 0; i < arguments.length; i++) {
for (let i = 0; i < arguments.length; i++) {
if (~action._methods.indexOf(arguments[i])) {
continue;
} else {
@ -352,7 +352,7 @@ module.exports = function (branch, done) {
return hasMethod.apply(null, arguments) && arguments.length === action._methods.length;
}
var method;
let method;
if (action._methods.length === 1) {
method = action._methods[0];

View File

@ -1,8 +1,8 @@
var _ = require('../../../src/lib/utils');
var utils = require('../../../grunt/utils');
var fs = require('fs');
var path = require('path');
const _ = require('../../../src/lib/utils');
const utils = require('../../../grunt/utils');
const fs = require('fs');
const path = require('path');
/**
@ -31,20 +31,20 @@ function stringify(thing, pretty) {
* We'll collect the templates here
* @type {Object}
*/
var templates = {};
const templates = {};
/**
* These keys will be available as local variables to each template
* @type {Object}
*/
var templateGlobals = {
const templateGlobals = {
stringify: stringify,
_: _,
indent: function (block, spaces) {
var indent = _.repeat(' ', spaces);
const indent = _.repeat(' ', spaces);
return block.split('\n').map(function (line) {
return indent + line;
}).join('\n');
@ -58,7 +58,7 @@ var templateGlobals = {
switch (type && type.toLowerCase ? type.toLowerCase() : 'any') {
case 'time':
case 'duration':
if (paramName === 'timestamp') return 'Timestamp'
if (paramName === 'timestamp') return 'Timestamp';
return '<<api-param-type-duration-string,`DurationString`>>';
case 'any':
return 'anything';
@ -96,7 +96,7 @@ var templateGlobals = {
};
fs.readdirSync(path.resolve(__dirname)).forEach(function (filename) {
var name = filename.replace(/\..+$/, '');
const name = filename.replace(/\..+$/, '');
if (name !== 'index') {
templates[name] = _.template(
fs.readFileSync(path.resolve(__dirname, filename), 'utf8'),

View File

@ -2,16 +2,16 @@ module.exports = function (branch, done) {
/**
* Creates a JSON version of the YAML test suite that can be simply bundled for use in the browser.
*/
var jsYaml = require('js-yaml');
var fs = require('fs');
var async = require('async');
var chalk = require('chalk');
var path = require('path');
var fromRoot = path.join.bind(path, require('find-root')(__dirname));
var _ = require(fromRoot('src/lib/utils'));
var tests = {}; // populated in readYamlTests
const jsYaml = require('js-yaml');
const fs = require('fs');
const async = require('async');
const chalk = require('chalk');
const path = require('path');
const fromRoot = path.join.bind(path, require('find-root')(__dirname));
const _ = require(fromRoot('src/lib/utils'));
const tests = {}; // populated in readYamlTests
var esDir = fromRoot('src/_elasticsearch_' + _.snakeCase(branch));
const esDir = fromRoot('src/_elasticsearch_' + _.snakeCase(branch));
// generate the yaml tests
async.series([
@ -21,16 +21,16 @@ module.exports = function (branch, done) {
], done);
function readYamlTests(done) {
var testDir = path.join(esDir, 'rest-api-spec/test/');
const testDir = path.join(esDir, 'rest-api-spec/test/');
function readDirectories(dir) {
fs.readdirSync(dir).forEach(function (filename) {
var filePath = path.join(dir, filename);
var stat = fs.statSync(filePath);
const filePath = path.join(dir, filename);
const stat = fs.statSync(filePath);
if (stat.isDirectory()) {
readDirectories(filePath);
} else if (filename.match(/\.yaml$/)) {
var file = tests[path.relative(testDir, filePath)] = [];
const file = tests[path.relative(testDir, filePath)] = [];
jsYaml.loadAll(fs.readFileSync(filePath, 'utf8'), function (doc) {
file.push(doc);
});
@ -43,14 +43,14 @@ module.exports = function (branch, done) {
}
function writeYamlTests(done) {
var testFile = fromRoot('test/integration/yaml_suite/yaml_tests_' + _.snakeCase(branch) + '.json');
const testFile = fromRoot('test/integration/yaml_suite/yaml_tests_' + _.snakeCase(branch) + '.json');
fs.writeFileSync(testFile, JSON.stringify(tests, null, ' '), 'utf8');
console.log(chalk.white.bold('wrote') + ' YAML tests as JSON to', testFile);
done();
}
function writeTestIndex(done) {
var file = fromRoot('test/integration/yaml_suite/index_' + _.snakeCase(branch) + '.js');
const file = fromRoot('test/integration/yaml_suite/index_' + _.snakeCase(branch) + '.js');
fs.writeFileSync(file, 'require(\'./run\')(\'' + branch + '\');\n', 'utf8');
console.log(chalk.white.bold('wrote') + ' YAML index to', file);
done();

View File

@ -1,15 +1,15 @@
var fs = require('fs');
var spawn = require('../_spawn');
var async = require('async');
var _ = require('lodash');
const fs = require('fs');
const spawn = require('../_spawn');
const async = require('async');
const _ = require('lodash');
var root = require('path').join(__dirname, '../..');
var bowerDir = root + '/src/bower_es_js';
const root = require('path').join(__dirname, '../..');
const bowerDir = root + '/src/bower_es_js';
// get both the bower and node package files
var bowerJson = require(bowerDir + '/bower.json');
var bowerPackageJson = require(bowerDir + '/package.json');
var esjsJson = require(root + '/package.json');
const bowerJson = require(bowerDir + '/bower.json');
const bowerPackageJson = require(bowerDir + '/package.json');
const esjsJson = require(root + '/package.json');
// update the version to match the node version
bowerJson.version = esjsJson.version;