Slight refactor to the api module, so it will simply extend the client like it did previously, and also expose itself on module.exports so that it can be referenced externally.
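
Roughly, the generated src/lib/api.js now has this shape -- a sketch only, with the Client wiring assumed for illustration rather than taken from this commit:

// api.js -- sketch: the generated module assigns itself to module.exports
var api = module.exports = {};

// each generated action becomes a method on the shared api object
api.ping = function (params, cb) {
  process.nextTick(function () { cb(null, true); });
};

// client.js -- sketch of the assumed wiring: the api still simply
// extends the client, as before...
function Client(config) { /* ... */ }
Object.keys(api).forEach(function (name) {
  Client.prototype[name] = api[name];
});

// ...and because api.js exposes itself on module.exports, the same
// object can now also be referenced externally via require('./api')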

Added a "grunt run" task which currently has a single config, generate_js_api.
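
The Gruntfile change itself isn't shown in this excerpt; a minimal sketch of what the task could look like, assuming it just spawns the generator script (the task body, cmd, args, and path are all assumptions):

// Gruntfile.js -- hypothetical "run" multi-task with a single generate_js_api target
module.exports = function (grunt) {
  grunt.initConfig({
    run: {
      generate_js_api: {
        cmd: 'node',
        args: ['scripts/generate/js_api']   // assumed path to the generator
      }
    }
  });

  grunt.registerMultiTask('run', 'run a script', function () {
    var done = this.async();
    grunt.util.spawn(
      { cmd: this.data.cmd, args: this.data.args, opts: { stdio: 'inherit' } },
      function (err) { done(!err); }
    );
  });
};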

Removed the api spec submodule; the generator now just downloads master when it runs.
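
Anything that needs the spec definitions now waits for the spec module's 'ready' event instead of require()ing checked-in JSON, the same way the generator does in the diff below:

// sketch of a consumer, mirroring require('./spec').on('ready', ...) in the generator
require('./spec').on('ready', function (specs) {
  specs.forEach(function (spec) {
    // each spec carries the transformed fields: name, methods, urls, params, body, ...
    console.log(spec.name, spec.methods.join(', '));
  });
});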
Author: Spencer Alger
Date: 2013-10-23 16:07:03 -07:00
parent c14d37aa42
commit 20462c73fd
10 changed files with 87 additions and 22022 deletions


@@ -7,7 +7,6 @@ var mkdirp = require('mkdirp');
var outputPath = _.joinPath(__dirname, '../../../src/lib/api.js');
var templates = require('./templates');
var specs = require('./spec');
// completely delete the output directory
var clean = (function () {
@@ -41,7 +40,7 @@ var clean = (function () {
})();
exports.run = function () {
require('./spec').on('ready', function (specs) {
var defs = [];
var namespaces = [];
@@ -123,6 +122,6 @@ exports.run = function () {
actions: actions,
namespaces: _.unique(namespaces.sort(), true)
}));
};
});
exports.run();


@@ -1,48 +1,85 @@

Removed: the old loader, which read the spec JSON checked in under the es_api_spec submodule and exported a plain array of transformed specs:

var _ = require('../../../src/lib/utils')
var docs = _.requireDir(module, '../../../es_api_spec/api');
var aliases = require('./aliases');

var castNotFoundRE = /exists/;
var usesBulkBodyRE = /^(bulk|msearch)$/;

var defs = [];

// iterate all of the found docs
Object.keys(docs).forEach(function (filename) {
  Object.keys(docs[filename]).forEach(function (name) {
    var def = docs[filename][name];
    def.name = name;
    defs.push(def);
  });
})

module.exports = _.map(defs, function (def) {
  var name = def.name;
  var steps = name.split('.');

  var spec = {
    name: name,
    methods: _.map(def.methods, function (m) { return m.toUpperCase(); }),
    docUrl: def.documentation,
    urlParts: def.url.parts,
    params: def.url.params,
    urls: _.difference(def.url.paths, aliases[name]),
    body: def.body || null,
    path2lib: _.repeat('../', steps.length + 1) + 'lib/'
  };

  if (def.body && def.body.requires) {
    spec.needBody = true;
  }

  if (usesBulkBodyRE.test(name)) {
    spec.bulkBody = true;
  }

  if (castNotFoundRE.test(name)) {
    spec.castNotFound = true;
  }

  return spec;
});

Added: the new loader, which downloads the master zip of elasticsearch-rest-api-spec from GitHub, transforms each api/*.json entry as it is unzipped, and exports an EventEmitter that emits 'ready' with the collected specs once everything has been parsed:

var _ = require('../../../src/lib/utils')
var EventEmitter = require('events').EventEmitter;
var aliases = require('./aliases');
var https = require('https');
var unzip = require('unzip');

var castNotFoundRE = /exists/;
var usesBulkBodyRE = /^(bulk|msearch)$/;

var specs = [];
var specCount = 0;
var doneParsing = false;

https.get('https://codeload.github.com/elasticsearch/elasticsearch-rest-api-spec/zip/master', function (incoming) {
  incoming.pipe(unzip.Parse())
    .on('entry', function (entry) {
      if (entry.type === 'File' && entry.path.match(/(^|\/)api\/.*\.json$/)) {
        specCount++;
        return collectEntry(entry);
      } else {
        entry.autodrain();
      }
    })
    .on('close', function () {
      doneParsing = true;
      if (specs.length === specCount) {
        module.exports.emit('ready', specs);
      }
    });
});

function collectEntry(entry) {
  var file = '';

  function onData (chunk) {
    file += chunk;
  }

  function onEnd () {
    entry.removeListener('data', onData);
    entry.removeListener('end', onEnd);
    process.nextTick(function () {
      transformFile(file);
    });
  }

  entry.on('data', onData);
  entry.on('end', onEnd);
}

function transformFile(file) {
  // iterate all of the specs within the file, should only be one
  _.each(JSON.parse(file), function (def, name) {
    var steps = name.split('.');

    var spec = {
      name: name,
      methods: _.map(def.methods, function (m) { return m.toUpperCase(); }),
      docUrl: def.documentation,
      urlParts: def.url.parts,
      params: def.url.params,
      urls: _.difference(def.url.paths, aliases[name]),
      body: def.body || null,
      path2lib: _.repeat('../', steps.length + 1) + 'lib/'
    };

    if (def.body && def.body.requires) {
      spec.needBody = true;
    }

    if (usesBulkBodyRE.test(name)) {
      spec.bulkBody = true;
    }

    if (castNotFoundRE.test(name)) {
      spec.castNotFound = true;
    }

    if (specs.push(spec) === specCount && doneParsing) {
      module.exports.emit('ready', specs);
    }
  });
}

module.exports = new EventEmitter();