Merge branch 'master' into 1.5
@@ -12,10 +12,12 @@ client.deleteByQuery({
 .Deleting documents using the Query DSL
 [source,js]
 ---------
-client.delete_by_query({
+client.deleteByQuery({
   index: 'posts',
   body: {
-    term: { published: false }
+    query: {
+      term: { published: false }
+    }
   }
 }, function (error, response) {
   // ...
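For reference, here is the documented call as it reads after the change above, completed into a self-contained sketch; the client construction and the callback body are assumptions for illustration, not part of this commit:

[source,js]
---------
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client();

client.deleteByQuery({
  index: 'posts',
  body: {
    query: {
      term: { published: false }
    }
  }
}, function (error, response) {
  if (error) {
    console.error(error);
  } else {
    console.log('deleted documents matching the query', response);
  }
});
---------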
@@ -402,10 +402,12 @@ client.deleteByQuery({
 .Deleting documents using the Query DSL
 [source,js]
 ---------
-client.delete_by_query({
+client.deleteByQuery({
   index: 'posts',
   body: {
-    term: { published: false }
+    query: {
+      term: { published: false }
+    }
   }
 }, function (error, response) {
   // ...
@@ -484,10 +484,12 @@ client.deleteByQuery({
 .Deleting documents using the Query DSL
 [source,js]
 ---------
-client.delete_by_query({
+client.deleteByQuery({
   index: 'posts',
   body: {
-    term: { published: false }
+    query: {
+      term: { published: false }
+    }
   }
 }, function (error, response) {
   // ...
@@ -484,10 +484,12 @@ client.deleteByQuery({
 .Deleting documents using the Query DSL
 [source,js]
 ---------
-client.delete_by_query({
+client.deleteByQuery({
   index: 'posts',
   body: {
-    term: { published: false }
+    query: {
+      term: { published: false }
+    }
  }
 }, function (error, response) {
   // ...
@@ -2314,6 +2316,8 @@ The default method is `POST` and the usual <<api-conventions,params and return v
 [horizontal]
 `dryRun`::
 `Boolean` -- Simulate the operation only and return the resulting state
+`explain`::
+`Boolean` -- Return an explanation of why the commands can or cannot be executed
 `filterMetadata`::
 `Boolean` -- Don't return cluster state metadata (default: false)
 `masterTimeout`::
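For reference, a minimal sketch of `cluster.reroute` using the `dryRun` and newly documented `explain` options above; the `commands` body, index name, shard number, and node names are placeholder assumptions, not part of this commit:

[source,js]
---------
client.cluster.reroute({
  dryRun: true,
  explain: true,
  body: {
    commands: [
      { move: { index: 'posts', shard: 0, from_node: 'node-1', to_node: 'node-2' } }
    ]
  }
}, function (error, response) {
  // with explain enabled, the response should describe why each command can or cannot run
});
---------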
@@ -484,10 +484,12 @@ client.deleteByQuery({
 .Deleting documents using the Query DSL
 [source,js]
 ---------
-client.delete_by_query({
+client.deleteByQuery({
   index: 'posts',
   body: {
-    term: { published: false }
+    query: {
+      term: { published: false }
+    }
   }
 }, function (error, response) {
   // ...
@@ -2314,6 +2316,8 @@ The default method is `POST` and the usual <<api-conventions,params and return v
 [horizontal]
 `dryRun`::
 `Boolean` -- Simulate the operation only and return the resulting state
+`explain`::
+`Boolean` -- Return an explanation of why the commands can or cannot be executed
 `filterMetadata`::
 `Boolean` -- Don't return cluster state metadata (default: false)
 `masterTimeout`::
@@ -76,7 +76,7 @@
     "url": "http://github.com/elasticsearch/elasticsearch-js.git"
   },
   "scripts": {
-    "test": "node scripts/generate --branch default && grunt test",
+    "test": "grunt test",
     "generate": "node scripts/generate"
   },
   "engines": {
@@ -71,8 +71,14 @@ function manage_es {
     local ES_URL="https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-${ES_RELEASE}.zip"
     local ES_DIR="${SNAPSHOTS}/${ES_VERSION}"
   else
+    if [[ $ES_BRANCH == "master" ]]; then
+      local JDK='JDK7'
+    else
+      local JDK='JDK6'
+    fi
+
     local ES_VERSION="${ES_BRANCH}_nightly"
-    local ES_URL="http://s3-us-west-2.amazonaws.com/build.elasticsearch.org/origin/$ES_BRANCH/nightly/JDK6/elasticsearch-latest-SNAPSHOT.zip"
+    local ES_URL="http://s3-us-west-2.amazonaws.com/build.elasticsearch.org/origin/$ES_BRANCH/nightly/$JDK/elasticsearch-latest-SNAPSHOT.zip"
     local DATE=`date +%Y_%m_%d`
     local ES_DIR="${SNAPSHOTS}/${ES_VERSION}_${DATE}"
   fi
@@ -20,19 +20,20 @@ var argv = require('optimist')
   .argv;

 var es = require('../../../src/elasticsearch');
 var _ = require('../../../src/lib/utils');
 var async = require('async');
 var path = require('path');
 var moment = require('moment');
 var makeSamples = require('./samples').make;
-var startingMoment = moment().startOf('day').subtract('days', argv.days);
-var endingMoment = moment().endOf('day').add('days', argv.days);
+
+var startingMoment = moment().utc().startOf('day').subtract('days', argv.days);
+var endingMoment = moment().utc().endOf('day').add('days', argv.days);

 var clientConfig = {
-  log: {
-    level: 'trace',
-    type: 'file',
-    path: path.join(__dirname, '../../../log')
-  }
+  // log: {
+  //   level: 'trace',
+  //   type: 'file',
+  //   path: path.join(__dirname, '../../../log')
+  // }
 };

 if (argv.host) {
@@ -42,75 +43,18 @@ if (argv.host) {
}

var client = new es.Client(clientConfig);
var samples = makeSamples(startingMoment, endingMoment);

console.log('Generating', argv.count, 'events across ±', argv.days, 'days');

fillIndecies(function () {
  var actions = [];
  var samples = makeSamples(startingMoment, endingMoment);
  var indices = {};
  var events = [];
  var doneEventing = false;
  var eventsPerBulk = 3500;
  var eventElementsPerBulk = eventsPerBulk * 2; // events are stored next to their headers, so each event has two elements;

  async.times(argv.count, function (i, done) {
    // random date, plus less random time
    var date = moment(samples.randomMsInDayRange())
      .utc()
      .startOf('day')
      .add('milliseconds', samples.lessRandomMsInDay());

    var event = {
      index: date.format('[logstash-]YYYY.MM.DD'),
      '@timestamp': date.toISOString(),
      ip: samples.ips(),
      extension: samples.extensions(),
      response: samples.responseCodes(),
      country: samples.countries(),
      point: samples.airports(),
      '@tags': [samples.tags(), samples.tags2()],
      utc_time: date.toISOString(),
      referer: 'http://' + samples.referrers() + '/' + samples.tags() + '/' + samples.astronauts(),
      agent: samples.userAgents(),
    };

    event.clientip = event.ip;
    event.bytes = event.response < 500 ? samples.lessRandomRespSize() : 0;
    event.request = '/' + samples.astronauts() + '.' + event.extension;
    event.memory = event.extension === 'php' ? event.bytes * 40 : 0;
    if (event.memory) {
      event.phpmemory = event.memory;
    }

    event['@message'] = event.ip + ' - - [' + date.toISOString() + '] "GET ' + event.request + ' HTTP/1.1" ' +
      event.response + ' ' + event.bytes + ' "-" "' + event.agent + '"';

    actions.push({
      index: {
        _index: event.index,
        _type: samples.types(),
        _id: i
      }
    });
    actions.push(event);

    if (actions.length === 3000 || i === argv.count - 1) {
      console.info('writing', actions.length / 2, 'documents');
      client.bulk({
        body: actions
      }, done);
      actions = [];
    } else {
      done();
    }
  }, function (err) {
    if (err) {
      throw err;
    } else {
      console.log('Done!');
      process.exit();
    }
  });
});

function fillIndecies(cb) {
  var movingDate = moment(startingMoment);
  function createIndex(indexName, done) {
    console.log('made index', indexName);
    var indexBody = {
      mappings: {
        _default_: {
@@ -141,45 +85,105 @@ function fillIndecies(cb) {
            }
          }
        },
        clientip: {
          type: 'ip'
        },
        ip: {
          type: 'ip'
        },
        memory: {
          type: 'double'
        }
      }
    }
  }
  },
  indexPushActions = [];
  };

  function createDateIndex(indexName) {
    return function (done) {
      client.indices.create({
        ignore: 400,
        index: indexName,
        body: indexBody
      }, function (err, resp) {
        if (err) {
          done(err);
        } else {
          done(null, resp.error ? 'existed' : 'created');
        }
      });
    };
  }

  while (movingDate.unix() < endingMoment.unix()) {
    indexPushActions.push(createDateIndex(movingDate.format('[logstash-]YYYY.MM.DD')));
    movingDate.add('day', 1);
  }

  async.parallel(indexPushActions, function (err, responses) {
    if (err) {
      console.error(err.message = 'Unable to create indicies: ' + err.message);
      console.error(err.stack);
    } else {
      _.each(_.groupBy(responses), function (list, did) {
        console.info(list.length, 'indicies', did);
      });
      cb();
    }
  });
  client.indices.create({
    ignore: 400,
    index: indexName,
    body: indexBody
  }, done);
}

var bulk = async.queue(function (chunk, done) {
  if (typeof chunk === 'string') {
    return createIndex(chunk, done);
  }

  console.info('writing', chunk.length / 2, 'documents');
  client.bulk({
    body: chunk
  }, done);
}, 3);

bulk.drain = function () {
  if (!doneEventing) {
    // console.log('indexed faster than the events were created');
    return;
  }

  client.close();
  console.log('done');
};

async.timesSeries(argv.count, function (i, done) {

  // random date, plus less random time
  var date = moment(samples.randomMsInDayRange())
    .utc()
    .startOf('day')
    .add('milliseconds', samples.lessRandomMsInDay());

  var event = {};

  event.index = date.format('[logstash-]YYYY.MM.DD');
  event['@timestamp'] = date.toISOString();
  event.ip = samples.ips();
  event.extension = samples.extensions();
  event.response = samples.responseCodes();
  event.country = samples.countries();
  event.point = samples.airports();
  event['@tags'] = [
    samples.tags(),
    samples.tags2()
  ];
  event.utc_time = date.toISOString();
  event.referer = 'http://' + samples.referrers() + '/' + samples.tags() + '/' + samples.astronauts();
  event.agent = samples.userAgents();
  event.clientip = event.ip;
  event.bytes = event.response < 500 ? samples.lessRandomRespSize() : 0;
  event.request = '/' + samples.astronauts() + '.' + event.extension;
  if (event.extension === 'php') {
    event.phpmemory = event.memory = event.bytes * 40;
  }
  event['@message'] = event.ip + ' - - [' + date.toISOString() + '] "GET ' + event.request + ' HTTP/1.1" ' +
    event.response + ' ' + event.bytes + ' "-" "' + event.agent + '"';


  if (indices[event.index] !== true) {
    bulk.push(event.index); // when it receives a string it handles that immediately
    indices[event.index] = true;
  }

  events.push(
    {
      index: {
        _index: event.index,
        _type: samples.types(),
        _id: i
      }
    },
    event
  );

  // eventsPerBulk must be multiplied by 2 because each event is two elements long
  if (events.length === eventElementsPerBulk || i === argv.count - 1) {
    bulk.push([events.splice(0, eventElementsPerBulk)]);
  }

  setImmediate(done);
}, function () {
  console.log('done creating events');
  doneEventing = true;
});
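For reference, a stripped-down sketch of the `async.queue` pattern the rewritten script relies on above: the worker treats a string chunk as an index to create and an array chunk as a bulk body to write, with at most three requests in flight. The client setup, index name, and sample document are illustrative assumptions, not the script itself:

[source,js]
---------
var async = require('async');
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client();

// worker: a string means "create this index", an array is a bulk body
var bulk = async.queue(function (chunk, done) {
  if (typeof chunk === 'string') {
    return client.indices.create({ ignore: 400, index: chunk }, done);
  }
  client.bulk({ body: chunk }, done);
}, 3); // concurrency of three, as in the script

bulk.drain = function () {
  console.log('queue emptied');
};

bulk.push('logstash-2014.01.01'); // handled as an index creation
bulk.push([[                      // one task: a bulk body of header + document pairs
  { index: { _index: 'logstash-2014.01.01', _type: 'logs', _id: 1 } },
  { '@message': 'example event' }
]]);
---------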
@@ -87,6 +87,10 @@ exports.make = function (startingMoment, endingMoment) {
   });

   return _.transform(sets, function (note, set, name) {
+    if (name === 'days') {
+      return note[name] = set;
+    }
+
     note[name] = _.bindKey(set, 'get');
   }, {});
 };
@@ -12,7 +12,6 @@ function WeightedList(list) {

   _.forEach(list, _.bindKey(this, 'push'));

-  console.log(this);
 }
 _.inherits(WeightedList, Array);

@@ -843,7 +843,9 @@ api.cluster.prototype.state = ca({
       'blocks',
       'metadata',
       'nodes',
-      'routing_table'
+      'routing_table',
+      'master_node',
+      'version'
     ]
   },
   index: {
@@ -861,7 +863,9 @@ api.cluster.prototype.state = ca({
       'blocks',
       'metadata',
       'nodes',
-      'routing_table'
+      'routing_table',
+      'master_node',
+      'version'
     ]
   }
 }
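For reference, the two new options above extend the metrics accepted by `cluster.state`; assuming they are exposed through the `metric` parameter (as in the underlying `GET _cluster/state/{metric}` REST API), a call might look like:

[source,js]
---------
client.cluster.state({
  metric: ['master_node', 'version']
}, function (error, response) {
  // response should be limited to the master_node and version sections of the cluster state
});
---------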
@@ -775,6 +775,7 @@ api.cluster.prototype.putSettings = ca({
  *
  * @param {Object} params - An object with parameters used to carry out this action
  * @param {Boolean} params.dryRun - Simulate the operation only and return the resulting state
+ * @param {Boolean} params.explain - Return an explanation of why the commands can or cannot be executed
  * @param {Boolean} params.filterMetadata - Don't return cluster state metadata (default: false)
  * @param {Date, Number} params.masterTimeout - Explicit operation timeout for connection to master node
  * @param {Date, Number} params.timeout - Explicit operation timeout
@@ -785,6 +786,9 @@ api.cluster.prototype.reroute = ca({
       type: 'boolean',
       name: 'dry_run'
     },
+    explain: {
+      type: 'boolean'
+    },
     filterMetadata: {
       type: 'boolean',
       name: 'filter_metadata'
@@ -843,7 +847,9 @@ api.cluster.prototype.state = ca({
       'blocks',
       'metadata',
       'nodes',
-      'routing_table'
+      'routing_table',
+      'master_node',
+      'version'
     ]
   },
   index: {
@@ -861,7 +867,9 @@ api.cluster.prototype.state = ca({
       'blocks',
       'metadata',
       'nodes',
-      'routing_table'
+      'routing_table',
+      'master_node',
+      'version'
     ]
   }
 }
@@ -775,6 +775,7 @@ api.cluster.prototype.putSettings = ca({
  *
  * @param {Object} params - An object with parameters used to carry out this action
  * @param {Boolean} params.dryRun - Simulate the operation only and return the resulting state
+ * @param {Boolean} params.explain - Return an explanation of why the commands can or cannot be executed
  * @param {Boolean} params.filterMetadata - Don't return cluster state metadata (default: false)
  * @param {Date, Number} params.masterTimeout - Explicit operation timeout for connection to master node
  * @param {Date, Number} params.timeout - Explicit operation timeout
@@ -785,6 +786,9 @@ api.cluster.prototype.reroute = ca({
       type: 'boolean',
       name: 'dry_run'
     },
+    explain: {
+      type: 'boolean'
+    },
     filterMetadata: {
       type: 'boolean',
       name: 'filter_metadata'
@@ -843,7 +847,9 @@ api.cluster.prototype.state = ca({
       'blocks',
       'metadata',
       'nodes',
-      'routing_table'
+      'routing_table',
+      'master_node',
+      'version'
     ]
   },
   index: {
@@ -861,7 +867,9 @@ api.cluster.prototype.state = ca({
       'blocks',
       'metadata',
       'nodes',
-      'routing_table'
+      'routing_table',
+      'master_node',
+      'version'
     ]
   }
 }
@@ -164,6 +164,7 @@ HttpConnector.prototype.request = function (params, cb) {
  request.setNoDelay(true);
  request.setSocketKeepAlive(true);
  request.chunkedEncoding = false;
  request.useChunkedEncodingByDefault = false;

  if (params.body) {
    request.setHeader('Content-Length', Buffer.byteLength(params.body, 'utf8'));
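For context, the flags above work together with the explicit Content-Length header below them: with chunked encoding disabled and a byte length set, the request body goes out as a single non-chunked write. A standalone sketch of the same idea with node's http module; the host, port, path, and body are placeholders:

[source,js]
---------
var http = require('http');

var body = JSON.stringify({ query: { match_all: {} } });

var request = http.request({
  host: 'localhost',
  port: 9200,
  method: 'POST',
  path: '/_search'
}, function (response) {
  response.resume(); // drain the response; handling omitted
});

request.chunkedEncoding = false;
request.useChunkedEncodingByDefault = false;
request.setHeader('Content-Length', Buffer.byteLength(body, 'utf8'));
request.end(body);
---------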
@@ -331,9 +331,32 @@ YamlDoc.prototype = {
         paramName = camelName;
       }

-      params[paramName] = (typeof val === 'string' && val[0] === '$') ? this.get(val) : val;
+      // for recursively traversing the params to replace '$stashed' vars
+      var transformObject = function (vals, val, i) {
+        switch (typeof val) {
+          case 'string':
+            val = (val[0] === '$') ? this.get(val) : val;
+            break;
+          case 'object':
+            val = _.transform(val, transformObject);
+        }
+        vals[i] = val;
+      }.bind(this);
+
+      // start with the initial param, only traverse traversables
+      switch (typeof val) {
+        case 'string':
+          val = (val[0] === '$') ? this.get(val) : val;
+          break;
+        case 'object':
+          val = _.transform(val, transformObject);
+          break;
+      }
+
+      params[paramName] = val;
     }, {}, this);


     expect(clientAction || clientActionName).to.be.a('function');

     if (typeof clientAction === 'function') {