added docs to the repo

This commit is contained in:
Spencer Alger
2013-12-27 16:41:38 -07:00
parent 11c976e9f8
commit 65f9cc7e99
101 changed files with 3907 additions and 63 deletions

1
.gitignore vendored
View File

@ -1,5 +1,4 @@
dist
docs
npm-debug.log
node_modules
scripts/scratch*

1
docs/README.asciidoc Normal file
View File

@ -0,0 +1 @@
= These files are used to build http://www.elasticsearch.org/guide/en/elasticsearch/client/javascript-api/current/index.html

View File

@ -0,0 +1 @@
Perform many index/delete operations in a single API call.

View File

@ -0,0 +1 @@
Clear the scroll request created by specifying the scroll parameter to search.
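For example, a minimal sketch (assuming a configured `client` and a `scrollId` taken from a previous `search()` response):
[source,js]
---------
client.clearScroll({
  scrollId: scrollId
}, function (error, response) {
  // the scroll's resources are now freed on the cluster
});
---------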

View File

@ -0,0 +1 @@
Get cluster settings (previously set with `putSettings()`)

View File

@ -0,0 +1 @@
Get a very simple status on the health of the cluster.
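A minimal sketch (assuming a configured `client`):
[source,js]
---------
client.cluster.health({
  // wait at most five seconds for a response
  timeout: '5s'
}, function (error, response) {
  // response.status is "green", "yellow", or "red"
});
---------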

View File

@ -0,0 +1,3 @@
Returns information about the hottest threads in the cluster or on a specific node as a String. The information is returned as text, and lets you see which operations on the cluster are currently the most taxing, for debugging or monitoring purposes.
WARNING: This endpoint returns plain text

View File

@ -0,0 +1 @@
Retrieve one or more (or all) of the cluster nodes' information.

View File

@ -0,0 +1 @@
Shutdown one or more (or all) nodes in the cluster.

View File

@ -0,0 +1 @@
Retrieve one or more (or all) of the cluster nodes' statistics.

View File

@ -0,0 +1 @@
Update specific cluster-wide settings.

View File

@ -0,0 +1 @@
Explicitly execute a cluster reroute, specifying one or more allocation commands.

View File

@ -0,0 +1 @@
Get comprehensive details about the state of the whole cluster (indices settings, allocations, etc).
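A minimal sketch (assuming a configured `client`):
[source,js]
---------
client.cluster.state(function (error, response) {
  // e.g. list the name of every index in the cluster
  console.log(Object.keys(response.metadata.indices));
});
---------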

View File

@ -0,0 +1 @@
Get the number of documents for the cluster, index, type, or a query.

View File

@ -0,0 +1 @@
Adds a typed JSON document in a specific index, making it searchable. If a document with the same `index`, `type`, and `id` already exists, an error will occur.

View File

@ -0,0 +1 @@
Delete a typed JSON document from a specific index based on its id.

View File

@ -0,0 +1 @@
Delete documents from one or more indices and one or more types based on a query.

View File

@ -0,0 +1 @@
Returns a boolean indicating whether or not a given document exists.

View File

@ -0,0 +1 @@
Provides details about a specific document's score in relation to a specific query. It will also tell you if the document matches the specified query. Also check out http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-percolate.html[percolators].

View File

@ -0,0 +1 @@
Get a typed JSON document from the index based on its id.

View File

@ -0,0 +1 @@
Get the source of a document by its index, type, and id.

View File

@ -0,0 +1,5 @@
Stores a typed JSON document in an index, making it searchable. When the `id` param is not set, a unique id will be auto-generated. When you specify an `id` either a new document will be created, or an existing document will be updated. To enforce "put-if-absent" behavior set the `opType` to `"create"` or use the `create()` method.
Optimistic concurrency control is performed when the `version` argument is specified. By default, no version checks are performed.
By default, the document will be available for `get()` actions immediately, but will only be available for searching after an index refresh (which can happen automatically or manually). See <<api-indices-refresh>>.
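For example, a minimal sketch of the "put-if-absent" behavior described above (assuming a configured `client`):
[source,js]
---------
client.index({
  index: 'myindex',
  type: 'mytype',
  id: '1',
  // fail with a conflict error instead of overwriting an existing document
  opType: 'create',
  body: {
    title: 'Test 1'
  }
}, function (error, response) {
  // ...
});
---------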

View File

@ -0,0 +1 @@
Perform the analysis process on a text and return the tokens breakdown of the text.

View File

@ -0,0 +1 @@
Clear either all caches or specific caches associated with one or more indices.

View File

@ -0,0 +1 @@
Close an index to remove its overhead from the cluster. A closed index is blocked for read/write operations.

View File

@ -0,0 +1 @@
Create an index in Elasticsearch.

View File

@ -0,0 +1 @@
Delete an index in Elasticsearch.

View File

@ -0,0 +1 @@
Delete a specific alias.

View File

@ -0,0 +1 @@
Delete a mapping (type definition) along with its data.

View File

@ -0,0 +1 @@
Delete an index template by its name.

View File

@ -0,0 +1 @@
Delete an index warmer.

View File

@ -0,0 +1 @@
Return a boolean indicating whether a given index exists.

View File

@ -0,0 +1 @@
Return a boolean indicating whether a given alias exists.

View File

@ -0,0 +1 @@
Check if a type/types exists in an index/indices.

View File

@ -0,0 +1 @@
Explicitly flush one or more indices.
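A minimal sketch (assuming a configured `client`):
[source,js]
---------
client.indices.flush({
  // omit `index` to flush all indices
  index: 'myindex'
}, function (error, response) {
  // ...
});
---------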

View File

@ -0,0 +1 @@
Retrieve a specified alias.

View File

@ -0,0 +1 @@
Retrieve specified aliases.

View File

@ -0,0 +1 @@
Retrieve mapping definition of a specific field.

View File

@ -0,0 +1 @@
Retrieve mapping definition of index or index/type.

View File

@ -0,0 +1 @@
Retrieve settings for one or more (or all) indices.

View File

@ -0,0 +1 @@
Retrieve an index template by its name.

View File

@ -0,0 +1 @@
Retrieve an index warmer.

View File

@ -0,0 +1 @@
Open a closed index, making it available for search.

View File

@ -0,0 +1 @@
Explicitly optimize one or more indices.

View File

@ -0,0 +1 @@
Create an alias for a specific index/indices.

View File

@ -0,0 +1 @@
Register a mapping definition for a specific type.

View File

@ -0,0 +1 @@
Change specific index level settings in real time.

View File

@ -0,0 +1 @@
Create an index template that will automatically be applied to newly created indices.
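For example, a minimal sketch (assuming a configured `client`; the template name and pattern are illustrative):
[source,js]
---------
client.indices.putTemplate({
  name: 'mytemplate',
  body: {
    // applied to any newly created index whose name matches this pattern
    template: 'logs-*',
    settings: {
      number_of_shards: 1
    }
  }
}, function (error, response) {
  // ...
});
---------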

View File

@ -0,0 +1 @@
Create an index warmer to run registered search requests to warm up the index before it is available for search.

View File

@ -0,0 +1 @@
Explicitly refresh one or more indices, making all operations performed since the last refresh available for search.

View File

@ -0,0 +1 @@
Retrieve low level segments information that a Lucene index (shard level) is built with.

View File

@ -0,0 +1 @@
Initiate a snapshot through the gateway of one or more indices.

View File

@ -0,0 +1 @@
Retrieve statistics on different operations happening on an index.

View File

@ -0,0 +1 @@
Get comprehensive status information for one or more indices.

View File

@ -0,0 +1 @@
Update specified aliases.

View File

@ -0,0 +1 @@
Validate a potentially expensive query without executing it.
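A minimal sketch (assuming a configured `client`):
[source,js]
---------
client.indices.validateQuery({
  index: 'myindex',
  // also return an explanation of why the query is or is not valid
  explain: true,
  body: {
    query: {
      query_string: { query: 'title:test' }
    }
  }
}, function (error, response) {
  // response.valid is a boolean
});
---------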

View File

@ -0,0 +1 @@
Get basic info from the current cluster.

View File

@ -0,0 +1 @@
Get multiple documents based on an index, type (optional) and ids. The body required by mget can take two forms: an array of document locations, or an array of document ids.

View File

@ -0,0 +1 @@
(more like this) Gets more documents that are “like” the document specified using `index`, `type`, and `id`.

View File

@ -0,0 +1 @@
Execute several search requests within the same request.

View File

@ -0,0 +1 @@
Match a document against registered percolator queries.

View File

@ -0,0 +1 @@
Scroll a search request (retrieve the next set of results) after specifying the scroll parameter in a `search()` call.

View File

@ -0,0 +1,4 @@
Return documents matching a query, aggregations/facets, highlighted snippets, suggestions, and more. Write your queries as either http://www.elasticsearch.org/guide/reference/api/search/uri-request/[simple query strings] in the `q` parameter, or by specifying a http://www.elasticsearch.org/guide/reference/api/search/request-body/[full request definition] using the http://www.elasticsearch.org/guide/reference/query-dsl/[Elasticsearch Query DSL] in the `body` parameter.
TIP: https://github.com/fullscale/elastic.js[elastic.js] can be used to make building query bodies easier.

View File

@ -0,0 +1 @@
The suggest feature suggests similar looking terms based on a provided text by using a specific suggester.

View File

@ -0,0 +1,4 @@
Update parts of a document. The required body parameter can contain one of two things:
* a partial document, which will be merged with the existing one.
* a `script`, which will update the document content.

View File

@ -0,0 +1,21 @@
.Perform three operations in a single request
[source,js]
---------
client.bulk({
body: [
// action description
{ index: { _index: 'myindex', _type: 'mytype', _id: 1 } },
// the document to index
{ title: 'foo' },
// action description
{ update: { _index: 'myindex', _type: 'mytype', _id: 2 } },
// the document to update
{ doc: { title: 'foo' } },
// action description
{ delete: { _index: 'myindex', _type: 'mytype', _id: 3 } },
// no document needed for this delete
]
}, function (err, resp) {
// ...
});
---------

View File

@ -0,0 +1,11 @@
.Return 10 hottest threads
[source,js]
---------
client.cluster.nodeHotThreads({
threads: 10,
nodeId: 'mymisbehavingnode',
maxRetries: 10
}, function (error, response) {
console.log(response);
})
---------

View File

@ -0,0 +1,10 @@
.Return information about JVM
[source,js]
---------
client.cluster.nodeInfo({ jvm: true })
.then(function (response) {
// enjoy your sweet info!
}, function (error) {
// scream!
})
---------

View File

@ -0,0 +1,37 @@
.Get the number of all documents in the cluster
[source,js]
---------
client.count(function (error, response, status) {
// check for and handle error
var count = response.count;
});
---------
.Get the number of documents in an index
[source,js]
---------
client.count({
index: 'index_name'
}, function (error, response) {
// ...
});
---------
.Get the number of documents matching a query
[source,js]
---------
client.count({
index: 'index_name',
body: {
filtered: {
filter: {
terms: {
foo: ['bar']
}
}
}
}
}, function (err, response) {
// ...
});
---------

View File

@ -0,0 +1,18 @@
.Create a document
[source,js]
---------
client.create({
index: 'myindex',
type: 'mytype',
id: '1',
body: {
title: 'Test 1',
tags: ['y', 'z'],
published: true,
published_at: '2013-01-01',
counter: 1
}
}, function (error, response) {
// ...
});
---------

View File

@ -0,0 +1,11 @@
.Delete the document `/myindex/mytype/1`
[source,js]
---------
client.delete({
index: 'myindex',
type: 'mytype',
id: '1'
}, function (error, response) {
// ...
});
---------

View File

@ -0,0 +1,23 @@
.Deleting documents with a simple query
[source,js]
---------
client.deleteByQuery({
index: 'myindex',
q: 'test'
}, function (error, response) {
// ...
});
---------
.Deleting documents using the Query DSL
[source,js]
---------
client.deleteByQuery({
index: 'posts',
body: {
term: { published: false }
}
}, function (error, response) {
// ...
});
---------

View File

@ -0,0 +1,15 @@
.Check that the document `/myindex/mytype/1` exists
[source,js]
---------
client.exists({
index: 'myindex',
type: 'mytype',
id: 1
}, function (error, exists) {
if (exists === true) {
// ...
} else {
// ...
}
});
---------

View File

@ -0,0 +1,32 @@
.See how a document is scored against a simple query
[source,js]
---------
client.explain({
// the document to test
index: 'myindex',
type: 'mytype',
id: '1',
// the query to score it against
q: 'field:value'
}, function (error, response) {
// ...
});
---------
.See how a document is scored against a query written in the Query DSL
[source,js]
---------
client.explain({
index: 'myindex',
type: 'mytype',
id: '1',
body: {
query: {
match: { title: 'test' }
}
}
}, function (error, response) {
// ...
});
---------

View File

@ -0,0 +1,11 @@
.Get `/myindex/mytype/1`
[source,js]
---------
client.get({
index: 'myindex',
type: 'mytype',
id: 1
}, function (error, response) {
// ...
});
---------

View File

@ -0,0 +1,16 @@
.Create or update a document
[source,js]
---------
client.index({
index: 'myindex',
type: 'mytype',
id: '1',
body: {
title: 'Test 1',
tags: ['y', 'z'],
published: true
}
}, function (error, response) {
});
---------

View File

@ -0,0 +1,29 @@
.An array of doc locations. Useful for getting documents from different indices.
[source,js]
---------
client.mget({
body: {
docs: [
{ _index: 'indexA', _type: 'typeA', _id: '1' },
{ _index: 'indexB', _type: 'typeB', _id: '1' },
{ _index: 'indexC', _type: 'typeC', _id: '1' }
]
}
}, function(error, response){
// ...
});
---------
.An array of ids. You must also specify the `index` and `type` that apply to all of the ids.
[source,js]
---------
client.mget({
index: 'myindex',
type: 'mytype',
body: {
ids: [1, 2, 3]
}
}, function(error, response){
// ...
});
---------

View File

@ -0,0 +1,12 @@
.Search for similar documents using the `title` property of document `myindex/mytype/1`
[source,js]
---------
client.mlt({
index: 'myindex',
type: 'mytype',
id: 1,
mlt_fields: 'title'
}, function (error, response) {
// ...
});
---------

View File

@ -0,0 +1,15 @@
.Perform multiple different searches; the body is made up of meta/data pairs
[source,js]
---------
client.msearch({
body: [
// match all query, on all indices and types
{},
{ query: { match_all: {} } },
// query_string query, on index/mytype
{ index: 'myindex', type: 'mytype' },
{ query: { query_string: { query: '"Test 1"' } } }
]
});
---------

View File

@ -0,0 +1,69 @@
.First, register queries named “alert-1” and “alert-2” for the “myindex” index
[source,js]
---------
client.index({
index: '_percolator',
type: 'myindex',
id: 'alert-1',
body: {
// This query will be run against documents sent to percolate
query: {
query_string: {
query: 'foo'
}
}
}
}, function (error, response) {
// ...
});
client.index({
index: '_percolator',
type: 'myindex',
id: 'alert-2',
body: {
// This query will also be run against documents sent to percolate
query: {
query_string: {
query: 'bar'
}
}
}
}, function (error, response) {
// ...
});
---------
.Then send documents to learn which registered `_percolator` queries they match
[source,js]
---------
client.percolate({
index: 'myindex',
body: {
doc: {
title: "Foo"
}
}
}, function (error, response) {
// response would equal
// {
// ok:true,
// matches: [ "alert-1" ]
// }
});
client.percolate({
index: 'myindex',
body: {
doc: {
title: "Foo Bar"
}
}
}, function (error, response) {
// response would equal
// {
// ok:true,
// matches: [ "alert-1", "alert-2" ]
// }
});
---------

View File

@ -0,0 +1,29 @@
.Collect every title in the index that contains the word "test"
[source,js]
---------
var allTitles = [];
// first we do a search, and specify a scroll timeout
client.search({
index: 'myindex',
// Set to 30 seconds because we are calling right back
scroll: '30s',
fields: ['title'],
q: 'title:test'
}, function getMoreUntilDone(error, response) {
// collect the title from each response
response.hits.hits.forEach(function (hit) {
allTitles.push(hit.fields.title);
});
if (response.hits.total !== allTitles.length) {
// now we can call scroll over and over
client.scroll({
scrollId: response._scroll_id,
scroll: '30s'
}, getMoreUntilDone);
} else {
console.log('every "test" title', allTitles);
}
});
---------

View File

@ -0,0 +1,34 @@
.Search with a simple query string query
[source,js]
---------
client.search({
index: 'myindex',
q: 'title:test'
}, function (error, response) {
// ...
});
---------
.Passing a full request definition using the Elasticsearch Query DSL as an object
[source,js]
---------
client.search({
index: 'myindex',
body: {
query: {
match: {
title: 'test'
}
},
facets: {
tags: {
terms: {
field: 'tags'
}
}
}
}
}, function (error, response) {
// ...
});
---------

View File

@ -0,0 +1,34 @@
.Return query terms suggestions (“auto-correction”)
[source,js]
---------
client.suggest({
index: 'myindex',
body: {
mysuggester: {
text: 'tset',
term: {
field: 'title'
}
}
}
}, function (error, response) {
// response will be formatted like so:
//
// {
// ...
// mysuggester: [
// {
// text: "tset",
// ...
// options: [
// {
// text: "test",
// score: 0.75,
// freq: 5
// }
// ]
// }
// ]
// }
});
---------

View File

@ -0,0 +1,69 @@
.Update document title using partial document
[source,js]
---------
client.update({
index: 'myindex',
type: 'mytype',
id: '1',
body: {
// put the partial document under the `doc` key
doc: {
title: 'Updated'
}
}
}, function (error, response) {
// ...
})
---------
.Add a tag to the document's `tags` property using a `script`
[source,js]
---------
client.update({
index: 'myindex',
type: 'mytype',
id: '1',
body: {
script: 'ctx._source.tags += tag',
params: { tag: 'some new tag' }
}
}, function (error, response) {
// ...
});
---------
.Increment a document counter by 1, or initialize it when the document does not exist
[source,js]
---------
client.update({
index: 'myindex',
type: 'mytype',
id: '666',
body: {
script: 'ctx._source.counter += 1',
upsert: {
counter: 1
}
}
}, function (error, response) {
// ...
})
---------
.Delete a document if it's tagged “to-delete”
[source,js]
---------
client.update({
index: 'myindex',
type: 'mytype',
id: '1',
body: {
script: 'ctx._source.tags.contains(tag) ? ctx.op = "delete" : ctx.op = "none"',
params: {
tag: 'to-delete'
}
}
}, function (error, response) {
// ...
});
---------

59
docs/about.asciidoc Normal file
View File

@ -0,0 +1,59 @@
[[about]]
== About
=== Features
* One-to-one mapping with REST API
* Configurable, automatic discovery of cluster nodes
* Persistent, Keep-Alive connections
* Intelligent handling of node/connection failure
* Load balancing (with pluggable selection strategy) across all available nodes.
* Works great in Node, as well as modern browsers (many thanks to https://github.com/substack/node-browserify[browserify]!!).
* Generalized, pluggable, and highly configurable architecture. You can change anything! See <<extending_core_components>>
=== Install in Node
[source,shell]
--------
npm install --save elasticsearch
--------
=== Browser Builds
To download a build of elasticsearch.js which functions well within modern browsers, use the links
below. These versions of the client are currently ***experimental***. They will break from time to time
and should probably not be used on public-facing websites (it's a whopping 150kb of code).
* v1.1.0 [https://download.elasticsearch.org/elasticsearch/elasticsearch-js/elasticsearch-js-1.1.0.zip[zip]] [https://download.elasticsearch.org/elasticsearch/elasticsearch-js/elasticsearch-js-1.1.0.tar.gz[tarball]]
* master [https://download.elasticsearch.org/elasticsearch/elasticsearch-js/elasticsearch-js-master.zip[zip]] [https://download.elasticsearch.org/elasticsearch/elasticsearch-js/elasticsearch-js-master.tar.gz[tarball]]
WARNING: The entire API is compatible with IE 10+, Chrome, Firefox, Safari, and Opera. The majority of the API will
also work in IE 8 & 9, but those browsers limit cross-domain requests to just GET and POST. IE versions
before 8 do not support cross-domain requests natively.
==== Angular Build (elasticsearch.angular.js)
* Registers the elasticsearch object as a factory `esFactory`
* Uses Angular's `$http` service
* Returns promises using Angular's `$q` service to properly trigger digest cycles within Angular
.Create a client instance and register it as a service
[source,js]
-------------------
module.service('es', function (esFactory) {
return esFactory({
host: 'localhost:9200',
// ...
});
});
-------------------
==== jQuery Build (elasticsearch.jquery.js)
* Uses jQuery's `.ajax()` functionality
* Returns jQuery "promises"
* Registers the module at `jQuery.es`
.Create a client with the jQuery build
[source,js]
-------------------
var client = new $.es.Client({
hosts: 'localhost:9200'
});
-------------------

27
docs/api_conventions.asciidoc Executable file
View File

@ -0,0 +1,27 @@
[[api-conventions]]
== API Conventions
=== Generic Parameters
By default, all API methods accept the following parameters. They are omitted from each method's param list below for brevity.
[horizontal]
`method`::
+
`String` -- The HTTP method to use for this request. All of the API methods have their own default.
`body`::
`String, Anything` -- The body to send along with this request. If the body is a string it will be passed along as-is, otherwise it is passed to the serializer and converted to either JSON or a newline-separated list of JSON objects based on the API method.
+
NOTE: the https://github.com/fullscale/elastic.js[elastic.js] library can be used to make building request bodies simpler.
`ignore`::
+
`Number, Number[]` -- HTTP status codes which should not be considered errors for this request.
=== Config values you can override per request
* `requestTimeout` -- <<config-request-timeout, more info>>
* `maxRetries` -- <<config-max-retries, more info>>
=== Callbacks or Promises
When a callback is passed to any of the API methods, it will be called with `(err, response, status)`. If you prefer to use promises, don't pass a callback and a promise will be returned. The promise will either be resolved with the response body, or rejected with the error that occurred (including any 300+ response for non "exists" methods).
Both styles of calling the API will return an object (either a promise or just a plain object) which has an `abort()` method. Calling that abort method ends the HTTP request, but it will not end the work Elasticsearch is doing.
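For example, a minimal sketch of aborting a slow request (assuming a configured `client`):
[source,js]
--------
var req = client.search({ q: 'test' });
// give up after one second; this stops the HTTP request,
// but not any work already underway on the cluster
setTimeout(function () {
  req.abort();
}, 1000);
--------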

2495
docs/api_methods.asciidoc Normal file

File diff suppressed because it is too large

246
docs/configuration.asciidoc Normal file
View File

@ -0,0 +1,246 @@
[[configuration]]
== Configuration
The `Client` constructor accepts a single object as its argument, and the following keys can be used to configure that client instance.
[source,js]
------
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client({
...
});
------
=== Config options
[horizontal]
`host or hosts`[[config-hosts]]::
`String, String[], Object[]` -- Specify the hosts that this client will connect to. If sniffing is enabled, or you call `client.sniff()`, this list will be used as seeds to discover the rest of your cluster.
Default:::
+
[source,js]
------
'http://localhost:9200'
------
`log`[[config-log]]:: `String, String[], Object, Object[], Constructor` -- Unless a constructor is specified, this sets the output settings for the bundled logger. See the section on <<logging,logging>> for more information.
Default in Node:::
+
[source,js]
-----
[{
type: 'stdio',
levels: ['error', 'warning']
}]
-----
`connectionClass`[[config-connectionClass]]:: `String, Constructor` -- Defines the class that will be used to create connections to store in the connection pool. If you are looking to implement additional protocols you should probably start by writing a Connection class that extends the ConnectionAbstract.
Defaults:::
* Node: `"http"`
* Browser Build: `"xhr"`
* Angular Build: `"angular"`
* jQuery Build: `"jquery"`
`selector`:: `String, Function` -- This function will be used to select a connection from the ConnectionPool. It should receive a single argument, the list of "active" connections, and return the connection to use. Use this selector to implement special logic for your client such as preferring nodes in a certain rack or data-center.
+
To make this function asynchronous, accept a second argument which will be the callback to use. The callback should be called Node-style with a possible error like: `cb(err, selectedConnection)`.
Default::: `"roundRobin"`
Options:::
* `"roundRobin"`
* `"random"`
`sniffOnStart`:: `Boolean` -- Should the client attempt to detect the rest of the cluster when it is first instantiated?
Default::: `false`
`sniffInterval`:: `Number, false` -- Every `n` milliseconds, perform a sniff operation and make sure our list of nodes is complete.
Default::: `false`
`sniffOnConnectionFault`:: `Boolean` -- Should the client immediately sniff for a more current list of nodes when a connection dies?
Default::: `false`
`maxRetries`[[config-max-retries]]:: `Integer` -- How many times should the client try to connect to other nodes before returning a <<connection-fault,ConnectionFault>> error.
Default::: `3`
`requestTimeout`[[config-request-timeout]]:: `Number` -- Milliseconds before an HTTP request will be aborted and retried. This can also be set per request.
Default::: `30000`
`deadTimeout`:: `Number` -- Milliseconds that a dead connection will wait before attempting to revive itself.
Default::: `30000`
`maxSockets`:: `Number` -- Number of sockets each connection should keep to its corresponding node. This will also be the maximum number of concurrent requests that could be made to that node. These sockets are currently kept alive using https://github.com/TBEDP/agentkeepalive[agentkeepalive].
Default::: `10`
`maxKeepAliveTime`:: `Number, false` -- Milliseconds of inactivity before the socket is destroyed
Default::: `60000`
`defer`:: `Function` -- Override the way that the client creates promises. If you would rather use any other promise library this is how you'd do that. Elasticsearch.js expects that the defer object has a `promise` property (which will be returned to promise consumers), as well as `resolve` and `reject` methods.
Default:::
+
[source,js]
-----
function () {
return when.defer();
}
-----
`nodesToHostCallback`:: `Function` -- This function will receive the list of nodes returned from the `_cluster/nodes` API during a sniff operation. The function should return an array of objects which match the <<config-hosts,specification for the `hosts` config>>.
Default:::
see https://github.com/elasticsearch/elasticsearch-js/blob/master/src/lib/nodes_to_host.js[nodes_to_host.js]
=== Examples
Connect to just a single seed node, and use sniffing to find the rest of the cluster.
[source,js]
-----
var client = new elasticsearch.Client({
host: 'localhost:9200',
sniffOnStart: true,
sniffInterval: 60000,
});
-----
Specify a couple of hosts which use basic auth.
[source,js]
-----
var client = new elasticsearch.Client({
hosts: [
'https://user:pass@box1.server.org:9200',
'https://user:pass@box2.server.org:9200'
]
});
-----
Use host objects to define extra properties, and a selector that uses those properties to pick a node.
[source,js]
-----
var client = new elasticsearch.Client({
hosts: [
{
protocol: 'https',
host: 'box1.server.org',
port: 56394,
country: 'EU',
weight: 10
},
{
protocol: 'https',
host: 'box2.server.org',
port: 56394,
country: 'US',
weight: 50
}
],
selector: function (hosts) {
var myCountry = process.env.COUNTRY;
// first try to find a node that is in the same country
var selection = _.find(hosts, function (node) {
return node.host.country === myCountry;
});
if (!selection) {
// choose the node with the smallest weight.
selection = _(hosts).sortBy(function (node) {
return node.host.weight;
}).first();
}
return selection;
}
});
-----
.Use a custom nodesToHostCallback that will direct all of the requests to a proxy and select the node via a query string param.
[source,js]
-----
var client = new elasticsearch.Client({
nodesToHostCallback: function (nodes) {
/*
* The nodes object will look something like this
* {
* "y-YWd-LITrWXWoCi4r2GlQ": {
* name: "Supremor",
* transport_address: "inet[/192.168.1.15:9300]",
* hostname: "Small-ESBox.infra",
* version: "1.0.0",
* http_address: "inet[/192.168.1.15:9200]",
* attributes: {
* custom: "attribute"
* }
* },
* ...
* }
*/
return _.transform(nodes, function (nodeList, node, id) {
var port = node.http_address.match(/:(\d+)/)[1];
nodeList.push({
host: 'esproxy.example.com',
port: 80,
query: {
nodeHostname: node.hostname,
nodePort: port
}
});
}, []);
}
})
-----

View File

@ -0,0 +1,3 @@
[[contributing]]
== Development/Contributing
Contributions are awesome, but please read https://github.com/elasticsearch/elasticsearch-js/blob/master/CONTRIBUTING.md before submitting a pull request.

18
docs/errors.asciidoc Normal file
View File

@ -0,0 +1,18 @@
[[errors]]
== Error Reference
These are the standard Error types which may be passed back from the client. Access to the constructors is provided via `require('elasticsearch').errors`.
[horizontal]
[[connection-fault]]
`ConnectionFault`:: The connection was unable to initiate or complete a request with the Elasticsearch node.
`NoConnections`:: All of the connections in the ConnectionPool are dead.
`RequestTimeout`:: The request timed-out.
`Serialization`:: The response received from Elasticsearch could not be deserialized.
`503` or `ServiceUnavailable`:: Elasticsearch responded with a 503 status.
`500` or `InternalServerError`:: Elasticsearch responded with a 500 status.
`412` or `PreconditionFailed`:: Elasticsearch responded with a 412 status.
`409` or `Conflict`:: Elasticsearch responded with a 409 status.
`403` or `Forbidden`:: Elasticsearch responded with a 403 status.
`404` or `NotFound`:: Elasticsearch responded with a 404 status.
`400` or `BadRequest`:: Elasticsearch responded with a 400 status.
`Generic`:: Elasticsearch responded with a status that does not map to its own error type.
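A minimal sketch of branching on these types in a callback (assuming a configured `client`):
[source,js]
--------
var errors = require('elasticsearch').errors;
client.get({
  index: 'myindex',
  type: 'mytype',
  id: '1'
}, function (err, response) {
  if (err instanceof errors.NotFound) {
    // the document does not exist
  } else if (err instanceof errors.RequestTimeout) {
    // the request took too long; maybe retry
  } else if (err) {
    // something else went wrong
  }
});
--------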

View File

@ -0,0 +1,17 @@
[[extending_core_components]]
== Extending Core Components
We decided to make this client low-level, and as such we probably have not implemented all the features you are looking for. For this reason, we made extending or even replacing the core components simple.
=== Connection
Coming Soon
=== ConnectionPool
Coming Soon
=== Log
see <<logging>>.
=== Client/API
The Client's only real purpose (as you may be able to tell from client.js) is to hold the API methods, set a few default values, and instantiate the transport. The transport is where all the networking, retry, and cluster discovery takes place and including it in your client is as simple as `transport = new es.Transport({});`. This way, you can benefit from the core features of our client.
NOTE: In the near future the entire transport level will be abstracted into a separate module, as well as the API.
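For example, a minimal sketch of using the transport on its own, based on the snippet above (the host and request params are illustrative):
[source,js]
--------
var es = require('elasticsearch');
var transport = new es.Transport({
  hosts: ['localhost:9200']
});
// make a raw request, bypassing the API methods entirely
transport.request({
  method: 'GET',
  path: '/_cluster/health'
}, function (error, response, status) {
  // ...
});
--------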

19
docs/index.asciidoc Normal file
View File

@ -0,0 +1,19 @@
= elasticsearch.js
include::about.asciidoc[]
include::quick_start.asciidoc[]
include::api_conventions.asciidoc[]
include::api_methods.asciidoc[]
include::configuration.asciidoc[]
include::extending_core_components.asciidoc[]
include::logging.asciidoc[]
include::development.asciidoc[]
include::errors.asciidoc[]

141
docs/logging.asciidoc Normal file
View File

@ -0,0 +1,141 @@
[[logging]]
== Setup Logging
Every application needs to have some solution for logging, and there isn't a standard in JavaScript, so instead of forcing you to rely on a specific logging module we created a bare bones logging solution and <<logging-customization>> will show you how to configure it. That said, our implementation of logging is very minimal and ***it is highly recommended that you use something like https://github.com/trentm/node-bunyan[Bunyan] once you move to production***.
=== Using A Library
When the client receives a function for the `log:` config value, it expects that the function is a constructor for a custom log class. This is the simplest way to integrate other logging libraries into the elasticsearch client at this time. The contract for this Log class is pretty straight-forward. See https://github.com/elasticsearch/elasticsearch-js/blob/master/src/lib/log.js[our implementation] for additional details.
==== `new Constructor(config)`
* `config` -- The object that was passed to the client constructor, used to determine the log level.
==== `error(error)`
* error -- `Error` The error that occurred
==== `warning(message)`
* message -- `String` The message to be logged
==== `info(message)`
* message -- `String` The message to be logged
==== `debug(message)`
* message -- `String` The message to be logged
==== `trace(httpMethod, requestUrl, requestBody, responseBody, responseStatus)`
Called after every HTTP request.
* httpMethod -- `String` The request's HTTP method
* requestUrl -- `Object, String` Depending on the connector in use, this will either be a url string or the object passed to node's http.request.
* requestBody -- `String, false-y` The body of the HTTP request; if the body is false-y, no body was sent
* responseBody -- `String, false-y` The body of the HTTP response; if the body is false-y, no body was received
* responseStatus -- `Integer, false-y` The HTTP response status
=== Bunyan Example
In the future we may add loggers for some of the more common libraries, but for now this is an exercise for the user. Here is a hint to get you started implementing a https://github.com/trentm/node-bunyan[Bunyan] log class. Be sure to check out the Bunyan repo for more info about setting things up.
.in log_to_bunyan.js
[source,js]
----------------
module.exports = LogToBunyan;
var bunyan = require('bunyan');
function LogToBunyan(config) {
// config is the object passed to the client constructor.
var bun = bunyan.createLogger({name: 'mylogger'});
this.error = bun.error.bind(bun);
this.warning = bun.warn.bind(bun);
this.info = bun.info.bind(bun);
this.debug = bun.debug.bind(bun);
this.trace = function (method, requestUrl, body, responseBody, responseStatus) {
bun.trace({
method: method,
requestUrl: requestUrl,
body: body,
responseBody: responseBody,
responseStatus: responseStatus
});
};
this.close = function () { /* bunyan's loggers do not need to be closed */ };
}
----------------
.in model.js
[source,js]
----------------
var elasticsearch = require('elasticsearch');
var LogClass = require('./log_to_bunyan');
// now just pass the log class to the client constructor using the "log" config option.
var client = new elasticsearch.Client({ log: LogClass });
----------------
[[logging-customization]]
== Using the default loggers
By default, the client creates a `"warning"` level, Console or Stdio logger. To change this, set the client's `log:` config value to either an array of logger configs, a single logger config, a log level, an array of log levels, or a constructor for your own logger. That's a lot of options, so here is an example of each.
.Change the logging level to trace, so we get every log message
[source,js]
----------------
var client = new elasticsearch.Client({ log: 'trace' });
----------------
.Change the logging level, only listen for error and trace messages
[source,js]
----------------
var client = new elasticsearch.Client({ log: ['error', 'trace'] });
----------------
.Log every message to a file
[source,js]
----------------
var client = new elasticsearch.Client({
log: {
type: 'file',
level: 'trace',
path: '/var/log/elasticsearch.log'
}
});
----------------
.Log everything to a file and errors to a socket
[source,js]
----------------
var client = new elasticsearch.Client({
log: [
{
type: 'stream',
level: 'error',
// config option specific to stream type loggers
stream: mySocket
},
{
type: 'file',
level: 'trace',
// config options specific to file type loggers
path: '/var/log/elasticsearch.log'
}
]
});
----------------
=== Logger Types
==== "stdio"
The default logger in Node; writes log messages for "info", "debug", and "trace" to stdout, and "error" and "warning" to stderr.
===== Options
* `[color=false]` -- `Boolean` Write with a bit of flair. The default value is intelligently chosen by https://github.com/sindresorhus/chalk[chalk] based on the details of your environment.
==== "file"
Append the log messages to a file.
===== Options
* `[path="elasticsearch.log"]` -- `String` Location of the file. It is created if it does not exist
==== "stream"
Send log messages to a http://nodejs.org/api/stream.html#stream_class_stream_writable[WritableStream]
===== Options
* `stream` -- `WritableStream` The object to write to.
==== "console"
Default logger for the browser build, logs to the console when one exists.

158
docs/quick_start.asciidoc Normal file
View File

@ -0,0 +1,158 @@
[[quick-start]]
== Quick Start
=== Creating a client
Start using Elasticsearch.js by creating an instance of the `elasticsearch.Client` class. The constructor accepts a config object/hash where you can define default values, or even entire classes, for the client to use. For a full list of config options check out the <<configuration,section dedicated to configuration>>.
[source,js]
-----------------
var elasticsearch = require('elasticsearch');
var client = new elasticsearch.Client({
host: 'localhost:9200',
log: 'trace'
});
-----------------
=== Say hello to Elasticsearch
Almost all of the methods on the client accept two arguments:
* `params` - an optional object/hash of parameters <<api-conventions,More info here>>.
* `callback` - an optional function that will be called with the final result of the method. When omitted, a https://github.com/cujojs/when/blob/master/docs/api.md#promise[promise] is returned. <<api-conventions,More info here>>.
==== Ping the cluster
.Send a HEAD request to "/?hello=elasticsearch" and allow up to 1 second for it to complete.
[source,js]
-----------------
client.ping({
requestTimeout: 1000,
// undocumented params are appended to the query string
hello: "elasticsearch!"
}, function (error) {
if (error) {
console.error('elasticsearch cluster is down!');
} else {
console.log('All is well');
}
});
-----------------
==== Use Promises
.Skip the callback to get a promise back
[source,js]
-----------------
client.search({
q: 'pants'
}).then(function (body) {
var hits = body.hits.hits;
}, function (error) {
// freak out!
});
-----------------
==== Allow 404 responses
.Prevent 404 responses from being considered errors by telling the client to ignore them.
[source,js]
-----------------
client.indices.delete({
index: 'test_index',
ignore: [404]
}).then(function (body) {
// since we told the client to ignore 404 errors, the
// promise is resolved even if the index does not exist
console.log('index was deleted or never existed');
}, function (error) {
// oh no!
});
-----------------
=== Searching for documents
A very common use-case for elasticsearch is to sort through large collections of documents in order to find ones that are relevant to a query. In most cases you will use the client's `search()` method to accomplish this.
==== Elasticsearch Query DSL
For many searches you will want to define a search document that tells elasticsearch exactly how to find the documents you are looking for. To do this you will use the http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl.html[elasticsearch query DSL]. If you are not familiar with Elasticsearch's query DSL, it is recommended that you research the topic at elasticsearch.org or watch/read one of these introductions:
* https://www.youtube.com/watch?v=52G5ZzE0XpY#t=1471[Clinton Gormley "Getting down and dirty with Elasticsearch"]
* http://okfnlabs.org/blog/2013/07/01/elasticsearch-query-tutorial.html#query-dsl-overview[Querying Elasticsearch - A Tutorial and Guide]
* http://exploringelasticsearch.com/book/searching-data/the-query-dsl-and-the-search-api.html[The Query DSL and the Search API - Searching Data - Exploring Elasticsearch]
Now for some examples using the Query DSL.
===== Simple match query
[source,js]
-----------------
// match tweets that have "elasticsearch"
// in their body field
client.search({
index: 'twitter',
type: 'tweets',
body: {
query: {
match: {
body: 'elasticsearch'
}
}
}
});
-----------------
===== More complex filtered query
To power a search form on a public site, you might want to allow the user to specify some text but also limit the documents returned by a few criteria. This is a good use-case for a filtered query.
NOTE: In this example, `request` and `response` are http://expressjs.com/api.html#request[Express] request and response objects.
[source,js]
-----------------
var pageNum = request.param('page', 1);
var perPage = request.param('per_page', 15);
var userQuery = request.param('search_query');
var userId = request.session.userId;
client.search({
index: 'posts',
from: (pageNum - 1) * perPage,
size: perPage,
body: {
query: {
filtered: {
query: {
match: {
// match the query against all of
// the fields in the posts index
_all: userQuery
}
},
filter: {
// only return documents that are
// public or owned by the current user
or: [
{
term: { privacy: "public" }
},
{
term: { owner: userId }
}
]
}
}
}
}
}, function (error, resp) {
if (error) {
// handle error
return;
}
response.render('search_results', {
results: resp.hits.hits,
page: pageNum,
pages: Math.ceil(resp.hits.total / perPage)
});
});
-----------------
You can find a lot more information about filters http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-filters.html[here].

24
docs/transport.asciidoc Normal file
View File

@ -0,0 +1,24 @@
[[transport-reference]]
== Transport
=== request(params, [callback])
==== Params
[horizontal]
`path`::
`String` -- The path/endpoint for the request
`query`::
`Object` -- The query string params
`body`::
`String, Object` -- The request body
`ignore`::
`Number, Number[]` -- HTTP status codes which should not be treated as errors
`requestTimeout`::
`Number` -- Milliseconds this request has to complete. The default can be set using the client's `requestTimeout:` config parameter.
`method`::
`String` -- The HTTP method to use for this request. All of the API methods have their own default.
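A minimal usage sketch (assuming a configured `client`, which exposes its transport as `client.transport`; the path is illustrative):
[source,js]
--------
client.transport.request({
  method: 'POST',
  path: '/myindex/_refresh',
  ignore: [404]
}, function (error, response, status) {
  // ...
});
--------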

View File

@ -24,7 +24,6 @@ module.exports = function (done) {
writeApiFile,
ensureDocsDir,
formatDocVars,
writeMethodList,
writeMethodDocs
], done);
@ -101,17 +100,9 @@ module.exports = function (done) {
done();
}
function writeMethodList(done) {
fs.writeFile(
'../../docs/_method_list.jade',
templates.apiMethodList(docVars),
done
);
}
function writeMethodDocs(done) {
fs.writeFile(
'../../docs/_methods.jade',
'../../docs/api_methods.asciidoc',
templates.apiMethods(docVars),
done
);

View File

@ -1,32 +0,0 @@
<%
function esc(str) {
return str.replace(/\|/g, '&#124;');
}
var topActions = [];
var names = {};
_.each(actions, function (action) {
if (action.name.indexOf('.') > -1) {
var space = _.studlyCase(action.name.split('.').slice(0, -1).join('.'));
if (!names[space]) {
names[space] = [];
}
names[space].push(action);
} else {
topActions.push(action);
}
}); %>
ul<%
_.each(topActions, function (action) {%>
li: a(href="api.html#<%= action.name.toLowerCase().replace(/[^\w]+/g, '-') %>") <%= action.name %><%
});
_.each(Object.keys(names).sort(), function (namespace) {%>
h3 <%= namespace %>
ul<%
_.each(names[namespace], function (action) {%>
li: a(href="api.html#<%= action.name.toLowerCase().replace(/[^\w]+/g, '-') %>") <%= action.name.replace(/^.*\./, '') %><%
})
})
%>

View File

@ -1,27 +1,34 @@
== API Method Reference
<%
_.each(actions, function (action) {
var actionId = action.name.toLowerCase().replace(/[^\w]+/g, '-');
var actionId = 'api-' + action.name.toLowerCase().replace(/[^\w]+/g, '-');
%>
h2#<%= actionId %>.fn <%= action.name %>(params, [callback])
include _descriptions/<%= action.name %>.jade
p.
The default method is <code><%= action.spec.method || 'GET' %></code> and
the usual <a href="#api-conventions">params and return values</a> apply.
See <a href="<%= action.docUrl %>" title="<%= action.name %>
at elasticsearch.org"><%= action.docUrl %></a> for more about this method.
include _examples/<%= action.name %>.jade
[[<%= actionId %>]]
=== `<%= action.name %>`
[source,js]
--------
client.<%= action.name %>([params, [callback]])
--------
<%= description(action.name) %>
The default method is `<%= action.spec.method || 'GET' %>` and the usual <<api-conventions,params and return values>> apply. See <%= action.docUrl %>[the elasticsearch docs] for more about this method.
<%= examples(action.name) %>
<% if (_.size(action.allParams)) { %>
h3 Params
dl.params.api
<% _.each(action.allParams, function (param, paramName) { %>
dt: dfn: code <%= paramWithDefault(paramName, param.default) %>
dd.
<span class="types"><%= paramType(param.type) %></span>
<%= indent(param.description || '', 4) %><%
}); %>
<% }
==== Params
});
[horizontal]<%
_.each(action.allParams, function (param, paramName) { %>
`<%= paramWithDefault(paramName, param.default) %>`::
`<%= paramType(param.type) %>` -- <%= joinParagraphs(param.description || '', 4) %><%
}); // endeach
} // endif
}); // endeach
%>

View File

@ -49,6 +49,34 @@ var templateGlobals = {
}).join('\n');
},
joinParagraphs: function (block) {
return block.split('\n\n').join('\n+\n');
},
description: function (action) {
try {
return fs.readFileSync(path.join(__dirname, '../../../docs/_descriptions/' + action + '.asciidoc'));
} catch (e) {
if (~e.message.indexOf('ENOENT')) {
return '// no description';
} else {
throw e;
}
}
},
examples: function (action) {
try {
return fs.readFileSync(path.join(__dirname, '../../../docs/_examples/' + action + '.asciidoc'));
} catch (e) {
if (~e.message.indexOf('ENOENT')) {
return '// no examples';
} else {
throw e;
}
}
},
paramType: function (type) {
switch (type && type.toLowerCase ? type.toLowerCase() : 'any') {
case 'time':

72
scripts/sync_examples.js Normal file
View File

@ -0,0 +1,72 @@
var async = require('async');
var fs = require('fs');
var S = require('string');
var restSpecDir = './src/rest-api-spec/api/';
function fileExists(path, done) {
fs.stat(path, function (err, stats) {
var exists;
if (err) {
if (err.message.match(/enoent/i)) {
err = void 0;
exists = false;
}
} else if (stats.isFile()) {
exists = true;
} else {
err = new Error('weird stats: ' + JSON.stringify(stats));
}
done(err, exists);
});
}
fs.readdir(restSpecDir, function (err, files) {
if (err) {
throw err;
}
async.forEachSeries(files, function (fileName, done) {
var apiName = S(fileName.replace(/\.json$/, '')).camelize().s;
var filePath = './docs/_descriptions/' + apiName;
var jadeFileExists;
var asciiFileExists;
async.series([
function (done) {
fileExists(filePath + '.jade', function (err, exists) {
jadeFileExists = exists;
done(err);
});
},
function (done) {
fileExists(filePath + '.asciidoc', function (err, exists) {
asciiFileExists = exists;
done(err);
});
},
function (done) {
if (jadeFileExists && !asciiFileExists) {
console.log(apiName, 'jade, no ascii');
fs.rename(filePath + '.jade', filePath + '.asciidoc', done);
}
else if (!jadeFileExists && !asciiFileExists) {
console.log(apiName, 'no jade, no ascii');
fs.writeFile(filePath + '.asciidoc', '', done);
}
else if (jadeFileExists) {
console.log(apiName, 'jade');
fs.unlink(filePath + '.jade', done);
}
else {
// only the asciidoc file exists; still signal completion
done();
}
}
], done);
}, function done(err) {
if (err) {
throw err;
} else {
console.log('done');
}
});
});

Some files were not shown because too many files have changed in this diff