Elasticsearch v8 (#1558)
committed by GitHub
parent 4c72b981cd
commit 1a227459f0
@@ -1,95 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

const { errors } = require('../../index')
const { Client, buildServer } = require('../utils')

function runAsyncTest (test) {
  test('async await (search)', t => {
    t.plan(1)

    function handler (req, res) {
      res.setHeader('Content-Type', 'application/json;utf=8')
      res.end(JSON.stringify({ hello: 'world' }))
    }

    buildServer(handler, async ({ port }, server) => {
      const client = new Client({
        node: `http://localhost:${port}`
      })

      try {
        const { body } = await client.search({
          index: 'test',
          type: 'doc',
          q: 'foo:bar'
        })
        t.same(body, { hello: 'world' })
      } catch (err) {
        t.fail(err)
      }
      server.stop()
    })
  })

  test('async await (index)', t => {
    t.plan(1)

    function handler (req, res) {
      res.setHeader('Content-Type', 'application/json;utf=8')
      res.end(JSON.stringify({ hello: 'world' }))
    }

    buildServer(handler, async ({ port }, server) => {
      const client = new Client({
        node: `http://localhost:${port}`
      })

      try {
        await client.index({
          index: 'test',
          body: { foo: 'bar' }
        })
        t.pass('ok')
      } catch (err) {
        t.fail(err)
      }
      server.stop()
    })
  })

  test('async await (ConfigurationError)', async t => {
    t.plan(1)

    const client = new Client({
      node: 'http://localhost:9200'
    })

    try {
      await client.index({ body: { foo: 'bar' } })
      t.fail('Should throw')
    } catch (err) {
      t.ok(err instanceof errors.ConfigurationError)
    }
  })
}

module.exports = runAsyncTest
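
For orientation, the call pattern the helper above exercises, distilled into a standalone sketch (TypeScript; the '../utils' helpers and the { body } response shape come from the deleted file itself, everything else is illustrative and not part of the diff):

// A stub HTTP server answers every request with { hello: 'world' };
// the client is pointed at it and the search response body is awaited.
import { Client, buildServer } from '../utils'

buildServer((req, res) => {
  res.setHeader('Content-Type', 'application/json;utf=8')
  res.end(JSON.stringify({ hello: 'world' }))
}, async ({ port }, server) => {
  const client = new Client({ node: `http://localhost:${port}` })
  const { body } = await client.search({ index: 'test', q: 'foo:bar' })
  console.log(body) // { hello: 'world' }
  server.stop()
})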
@@ -1,335 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

const { test } = require('tap')
const { errors } = require('../../index')
const { Client, buildServer } = require('../utils')

test('Basic (callback)', t => {
  t.plan(2)

  function handler (req, res) {
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    client.search({
      index: 'test',
      q: 'foo:bar'
    }, (err, { body }) => {
      t.error(err)
      t.same(body, { hello: 'world' })
      server.stop()
    })
  })
})

test('Basic (promises)', t => {
  t.plan(1)

  function handler (req, res) {
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    client
      .search({
        index: 'test',
        q: 'foo:bar'
      })
      .then(({ body }) => {
        t.same(body, { hello: 'world' })
        server.stop()
      })
      .catch(t.fail)
  })
})

test('Error (callback)', t => {
  t.plan(1)

  function handler (req, res) {
    res.statusCode = 500
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    client.search({
      index: 'test',
      q: 'foo:bar'
    }, (err, { body }) => {
      t.ok(err)
      server.stop()
    })
  })
})

test('Error (promises)', t => {
  t.plan(1)

  function handler (req, res) {
    res.statusCode = 500
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    client
      .search({
        index: 'test',
        q: 'foo:bar'
      })
      .then(t.fail)
      .catch(err => {
        t.ok(err)
        server.stop()
      })
  })
})

test('Finally method (promises)', t => {
  t.plan(1)

  function handler (req, res) {
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    const request = client.search({
      index: 'test',
      q: 'foo:bar'
    })

    t.type(request.finally, 'function')

    request
      .finally(() => {
        server.stop()
      })
  })
})

test('Abort method (callback)', t => {
  t.plan(3)

  function handler (req, res) {
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    const request = client.search({
      index: 'test',
      q: 'foo:bar'
    }, (err, { body }) => {
      t.error(err)
      t.same(body, { hello: 'world' })
      server.stop()
    })

    t.type(request.abort, 'function')
  })
})

test('Abort method (promises)', t => {
  t.plan(2)

  function handler (req, res) {
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    const request = client.search({
      index: 'test',
      q: 'foo:bar'
    })

    request
      .then(({ body }) => {
        t.same(body, { hello: 'world' })
        server.stop()
      })
      .catch(t.fail)

    t.type(request.abort, 'function')
  })
})

test('Basic (options and callback)', t => {
  t.plan(2)

  function handler (req, res) {
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    client.search({
      index: 'test',
      q: 'foo:bar'
    }, {
      requestTimeout: 10000
    }, (err, { body }) => {
      t.error(err)
      t.same(body, { hello: 'world' })
      server.stop()
    })
  })
})

test('Basic (options and promises)', t => {
  t.plan(1)

  function handler (req, res) {
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    client
      .search({
        index: 'test',
        q: 'foo:bar'
      }, {
        requestTimeout: 10000
      })
      .then(({ body }) => {
        t.same(body, { hello: 'world' })
        server.stop()
      })
      .catch(t.fail)
  })
})

test('If the API uses the same key for both url and query parameter, the url should win', t => {
  t.plan(2)

  function handler (req, res) {
    t.equal(req.url, '/index/_bulk')
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`
    })

    // bulk has two `type` parameters
    client.bulk({
      index: 'index',
      body: []
    }, (err, { body, warnings }) => {
      t.error(err)
      server.stop()
    })
  })
})

test('ConfigurationError (callback)', t => {
  t.plan(1)

  const client = new Client({
    node: 'http://localhost:9200'
  })

  client.index({
    body: { foo: 'bar' }
  }, (err, { body }) => {
    t.ok(err instanceof errors.ConfigurationError)
  })
})

test('ConfigurationError (promises)', t => {
  t.plan(1)

  const client = new Client({
    node: 'http://localhost:9200'
  })

  client
    .index({ body: { foo: 'bar' } })
    .then(t.fail)
    .catch(err => {
      t.ok(err instanceof errors.ConfigurationError)
    })
})

test('The callback with a sync error should be called in the next tick', t => {
  t.plan(4)

  const client = new Client({
    node: 'http://localhost:9200'
  })

  const transportReturn = client.index({ body: { foo: 'bar' } }, (err, result) => {
    t.ok(err instanceof errors.ConfigurationError)
  })

  t.type(transportReturn.then, 'function')
  t.type(transportReturn.catch, 'function')
  t.type(transportReturn.abort, 'function')
})

if (Number(process.version.split('.')[0].slice(1)) >= 8) {
  require('./api-async')(test)
}
@@ -1,505 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

const { test } = require('tap')
const { URL } = require('url')
const BaseConnectionPool = require('../../lib/pool/BaseConnectionPool')
const Connection = require('../../lib/Connection')

test('API', t => {
  t.test('addConnection', t => {
    const pool = new BaseConnectionPool({ Connection })
    const href = 'http://localhost:9200/'
    pool.addConnection(href)
    t.ok(pool.connections.find(c => c.id === href) instanceof Connection)
    t.equal(pool.connections.find(c => c.id === href).status, Connection.statuses.ALIVE)
    t.end()
  })

  t.test('addConnection should throw with two connections with the same id', t => {
    const pool = new BaseConnectionPool({ Connection })
    const href = 'http://localhost:9200/'
    pool.addConnection(href)
    try {
      pool.addConnection(href)
      t.fail('Should throw')
    } catch (err) {
      t.equal(err.message, `Connection with id '${href}' is already present`)
    }
    t.end()
  })

  t.test('addConnection should handle not-friendly url parameters for user and password', t => {
    const pool = new BaseConnectionPool({ Connection })
    const href = 'http://us"er:p@assword@localhost:9200/'
    pool.addConnection(href)
    const conn = pool.connections[0]
    t.equal(conn.url.username, 'us%22er')
    t.equal(conn.url.password, 'p%40assword')
    t.match(conn.headers, {
      authorization: 'Basic ' + Buffer.from('us"er:p@assword').toString('base64')
    })
    t.end()
  })

  t.test('markDead', t => {
    const pool = new BaseConnectionPool({ Connection, sniffEnabled: true })
    const href = 'http://localhost:9200/'
    let connection = pool.addConnection(href)
    t.same(pool.markDead(connection), pool)
    connection = pool.connections.find(c => c.id === href)
    t.equal(connection.status, Connection.statuses.ALIVE)
    t.end()
  })

  t.test('markAlive', t => {
    const pool = new BaseConnectionPool({ Connection, sniffEnabled: true })
    const href = 'http://localhost:9200/'
    let connection = pool.addConnection(href)
    t.same(pool.markAlive(connection), pool)
    connection = pool.connections.find(c => c.id === href)
    t.equal(connection.status, Connection.statuses.ALIVE)
    t.end()
  })

  t.test('getConnection should throw', t => {
    const pool = new BaseConnectionPool({ Connection })
    const href = 'http://localhost:9200/'
    pool.addConnection(href)
    try {
      pool.getConnection()
      t.fail('Should fail')
    } catch (err) {
      t.equal(err.message, 'getConnection must be implemented')
    }
    t.end()
  })

  t.test('removeConnection', t => {
    const pool = new BaseConnectionPool({ Connection })
    const href = 'http://localhost:9200/'
    const connection = pool.addConnection(href)
    pool.removeConnection(connection)
    t.equal(pool.size, 0)
    t.end()
  })

  t.test('empty', t => {
    const pool = new BaseConnectionPool({ Connection })
    pool.addConnection('http://localhost:9200/')
    pool.addConnection('http://localhost:9201/')
    pool.empty(() => {
      t.equal(pool.size, 0)
      t.end()
    })
  })

  t.test('urlToHost', t => {
    const pool = new BaseConnectionPool({ Connection })
    const url = 'http://localhost:9200'
    t.same(
      pool.urlToHost(url),
      { url: new URL(url) }
    )
    t.end()
  })

  t.test('nodesToHost', t => {
    t.test('publish_address as ip address (IPv4)', t => {
      const pool = new BaseConnectionPool({ Connection })
      const nodes = {
        a1: {
          http: {
            publish_address: '127.0.0.1:9200'
          },
          roles: ['master', 'data', 'ingest']
        },
        a2: {
          http: {
            publish_address: '127.0.0.1:9201'
          },
          roles: ['master', 'data', 'ingest']
        }
      }

      t.same(pool.nodesToHost(nodes, 'http:'), [{
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }, {
        url: new URL('http://127.0.0.1:9201'),
        id: 'a2',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }])

      t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, '127.0.0.1:9200')
      t.equal(pool.nodesToHost(nodes, 'http:')[1].url.host, '127.0.0.1:9201')
      t.end()
    })

    t.test('publish_address as ip address (IPv6)', t => {
      const pool = new BaseConnectionPool({ Connection })
      const nodes = {
        a1: {
          http: {
            publish_address: '[::1]:9200'
          },
          roles: ['master', 'data', 'ingest']
        },
        a2: {
          http: {
            publish_address: '[::1]:9201'
          },
          roles: ['master', 'data', 'ingest']
        }
      }

      t.same(pool.nodesToHost(nodes, 'http:'), [{
        url: new URL('http://[::1]:9200'),
        id: 'a1',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }, {
        url: new URL('http://[::1]:9201'),
        id: 'a2',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }])

      t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, '[::1]:9200')
      t.equal(pool.nodesToHost(nodes, 'http:')[1].url.host, '[::1]:9201')
      t.end()
    })

    t.test('publish_address as host/ip (IPv4)', t => {
      const pool = new BaseConnectionPool({ Connection })
      const nodes = {
        a1: {
          http: {
            publish_address: 'example.com/127.0.0.1:9200'
          },
          roles: ['master', 'data', 'ingest']
        },
        a2: {
          http: {
            publish_address: 'example.com/127.0.0.1:9201'
          },
          roles: ['master', 'data', 'ingest']
        }
      }

      t.same(pool.nodesToHost(nodes, 'http:'), [{
        url: new URL('http://example.com:9200'),
        id: 'a1',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }, {
        url: new URL('http://example.com:9201'),
        id: 'a2',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }])

      t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, 'example.com:9200')
      t.equal(pool.nodesToHost(nodes, 'http:')[1].url.host, 'example.com:9201')
      t.end()
    })

    t.test('publish_address as host/ip (IPv6)', t => {
      const pool = new BaseConnectionPool({ Connection })
      const nodes = {
        a1: {
          http: {
            publish_address: 'example.com/[::1]:9200'
          },
          roles: ['master', 'data', 'ingest']
        },
        a2: {
          http: {
            publish_address: 'example.com/[::1]:9201'
          },
          roles: ['master', 'data', 'ingest']
        }
      }

      t.same(pool.nodesToHost(nodes, 'http:'), [{
        url: new URL('http://example.com:9200'),
        id: 'a1',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }, {
        url: new URL('http://example.com:9201'),
        id: 'a2',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }])

      t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, 'example.com:9200')
      t.equal(pool.nodesToHost(nodes, 'http:')[1].url.host, 'example.com:9201')
      t.end()
    })

    t.test('Should use the configure protocol', t => {
      const pool = new BaseConnectionPool({ Connection })
      const nodes = {
        a1: {
          http: {
            publish_address: 'example.com/127.0.0.1:9200'
          },
          roles: ['master', 'data', 'ingest']
        },
        a2: {
          http: {
            publish_address: 'example.com/127.0.0.1:9201'
          },
          roles: ['master', 'data', 'ingest']
        }
      }

      t.equal(pool.nodesToHost(nodes, 'https:')[0].url.protocol, 'https:')
      t.equal(pool.nodesToHost(nodes, 'http:')[1].url.protocol, 'http:')
      t.end()
    })

    t.end()
  })

  t.test('update', t => {
    t.test('Should not update existing connections', t => {
      t.plan(2)
      const pool = new BaseConnectionPool({ Connection })
      pool.addConnection([{
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: {
          master: true,
          data: true,
          ingest: true
        }
      }, {
        url: new URL('http://127.0.0.1:9201'),
        id: 'a2',
        roles: {
          master: true,
          data: true,
          ingest: true
        }
      }])

      pool.update([{
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: null
      }, {
        url: new URL('http://127.0.0.1:9201'),
        id: 'a2',
        roles: null
      }])

      t.ok(pool.connections.find(c => c.id === 'a1').roles !== null)
      t.ok(pool.connections.find(c => c.id === 'a2').roles !== null)
    })

    t.test('Should not update existing connections (mark alive)', t => {
      t.plan(5)
      class CustomBaseConnectionPool extends BaseConnectionPool {
        markAlive (connection) {
          t.ok('called')
          super.markAlive(connection)
        }
      }
      const pool = new CustomBaseConnectionPool({ Connection })
      const conn1 = pool.addConnection({
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: {
          master: true,
          data: true,
          ingest: true
        }
      })

      const conn2 = pool.addConnection({
        url: new URL('http://127.0.0.1:9201'),
        id: 'a2',
        roles: {
          master: true,
          data: true,
          ingest: true
        }
      })

      pool.markDead(conn1)
      pool.markDead(conn2)

      pool.update([{
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: null
      }, {
        url: new URL('http://127.0.0.1:9201'),
        id: 'a2',
        roles: null
      }])

      t.ok(pool.connections.find(c => c.id === 'a1').roles !== null)
      t.ok(pool.connections.find(c => c.id === 'a2').roles !== null)
    })

    t.test('Should not update existing connections (same url, different id)', t => {
      t.plan(3)
      class CustomBaseConnectionPool extends BaseConnectionPool {
        markAlive (connection) {
          t.ok('called')
          super.markAlive(connection)
        }
      }
      const pool = new CustomBaseConnectionPool({ Connection })
      pool.addConnection([{
        url: new URL('http://127.0.0.1:9200'),
        id: 'http://127.0.0.1:9200/',
        roles: {
          master: true,
          data: true,
          ingest: true
        }
      }])

      pool.update([{
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: true
      }])

      // roles will never be updated, we only use it to do
      // a dummy check to see if the connection has been updated
      t.same(pool.connections.find(c => c.id === 'a1').roles, {
        master: true,
        data: true,
        ingest: true,
        ml: false
      })
      t.equal(pool.connections.find(c => c.id === 'http://127.0.0.1:9200/'), undefined)
    })

    t.test('Add a new connection', t => {
      t.plan(2)
      const pool = new BaseConnectionPool({ Connection })
      pool.addConnection({
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: {
          master: true,
          data: true,
          ingest: true
        }
      })

      pool.update([{
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: null
      }, {
        url: new URL('http://127.0.0.1:9201'),
        id: 'a2',
        roles: null
      }])

      t.ok(pool.connections.find(c => c.id === 'a1').roles !== null)
      t.ok(pool.connections.find(c => c.id === 'a2'))
    })

    t.test('Remove old connections', t => {
      t.plan(3)
      const pool = new BaseConnectionPool({ Connection })
      pool.addConnection({
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: null
      })

      pool.update([{
        url: new URL('http://127.0.0.1:9200'),
        id: 'a2',
        roles: null
      }, {
        url: new URL('http://127.0.0.1:9201'),
        id: 'a3',
        roles: null
      }])

      t.notOk(pool.connections.find(c => c.id === 'a1'))
      t.ok(pool.connections.find(c => c.id === 'a2'))
      t.ok(pool.connections.find(c => c.id === 'a3'))
    })

    t.end()
  })

  t.test('CreateConnection', t => {
    t.plan(1)
    const pool = new BaseConnectionPool({ Connection })
    const conn = pool.createConnection('http://localhost:9200')
    pool.connections.push(conn)
    try {
      pool.createConnection('http://localhost:9200')
      t.fail('Should throw')
    } catch (err) {
      t.equal(err.message, 'Connection with id \'http://localhost:9200/\' is already present')
    }
  })

  t.end()
})
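
The nodesToHost cases above hinge on how a node's publish_address is turned into a URL. A minimal sketch of that mapping, written only to make the expectations readable (the function name and code are illustrative, not the pool's actual implementation):

import { URL } from 'url'

// 'host/ip:port' keeps the host name plus the port; a bare 'ip:port'
// (including '[ipv6]:port') is used verbatim.
function publishAddressToUrl (publishAddress: string, protocol: string): URL {
  const slash = publishAddress.indexOf('/')
  if (slash === -1) {
    return new URL(`${protocol}//${publishAddress}`)
  }
  const hostname = publishAddress.slice(0, slash)
  const port = publishAddress.slice(publishAddress.lastIndexOf(':') + 1)
  return new URL(`${protocol}//${hostname}:${port}`)
}

// publishAddressToUrl('example.com/[::1]:9201', 'http:').host === 'example.com:9201'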
@@ -1,321 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

const { test } = require('tap')
const { errors } = require('../../index')
const {
  Client,
  buildServer,
  connection: { MockConnection }
} = require('../utils')

test('Should create a child client (headers check)', t => {
  t.plan(4)

  let count = 0
  function handler (req, res) {
    if (count++ === 0) {
      t.match(req.headers, { 'x-foo': 'bar' })
    } else {
      t.match(req.headers, { 'x-baz': 'faz' })
    }
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`,
      headers: { 'x-foo': 'bar' }
    })
    const child = client.child({
      headers: { 'x-baz': 'faz' }
    })

    client.info((err, res) => {
      t.error(err)
      child.info((err, res) => {
        t.error(err)
        server.stop()
      })
    })
  })
})

test('Should create a child client (timeout check)', t => {
  t.plan(2)

  function handler (req, res) {
    setTimeout(() => {
      res.setHeader('Content-Type', 'application/json;utf=8')
      res.end(JSON.stringify({ hello: 'world' }))
    }, 50)
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({ node: `http://localhost:${port}` })
    const child = client.child({ requestTimeout: 25, maxRetries: 0 })

    client.info((err, res) => {
      t.error(err)
      child.info((err, res) => {
        t.ok(err instanceof errors.TimeoutError)
        server.stop()
      })
    })
  })
})

test('Client extensions', t => {
  t.test('One level', t => {
    t.plan(1)

    const client = new Client({ node: 'http://localhost:9200' })
    client.extend('utility.index', () => {
      return () => t.ok('called')
    })

    const child = client.child()
    child.utility.index()
  })

  t.test('Two levels', t => {
    t.plan(2)

    const client = new Client({ node: 'http://localhost:9200' })
    client.extend('utility.index', () => {
      return () => t.ok('called')
    })

    const child = client.child()
    child.extend('utility.search', () => {
      return () => t.ok('called')
    })

    const grandchild = child.child()
    grandchild.utility.index()
    grandchild.utility.search()
  })

  t.test('The child should not extend the parent', t => {
    t.plan(1)

    const client = new Client({ node: 'http://localhost:9200' })
    const child = client.child()

    child.extend('utility.index', () => {
      return () => t.fail('Should not be called')
    })

    try {
      client.utility.index()
    } catch (err) {
      t.ok(err)
    }
  })

  t.end()
})

test('Should share the event emitter', t => {
  t.test('One level', t => {
    t.plan(2)

    const client = new Client({
      node: 'http://localhost:9200',
      Connection: MockConnection
    })
    const child = client.child()

    client.on('response', (err, meta) => {
      t.error(err)
    })

    child.info((err, res) => {
      t.error(err)
    })
  })

  t.test('Two levels', t => {
    t.plan(2)

    const client = new Client({
      node: 'http://localhost:9200',
      Connection: MockConnection
    })
    const child = client.child()
    const grandchild = child.child()

    client.on('response', (err, meta) => {
      t.error(err)
    })

    grandchild.info((err, res) => {
      t.error(err)
    })
  })

  t.test('Child listener - one level', t => {
    t.plan(2)

    const client = new Client({
      node: 'http://localhost:9200',
      Connection: MockConnection
    })
    const child = client.child()

    child.on('response', (err, meta) => {
      t.error(err)
    })

    child.info((err, res) => {
      t.error(err)
    })
  })

  t.test('Child listener - two levels', t => {
    t.plan(2)

    const client = new Client({
      node: 'http://localhost:9200',
      Connection: MockConnection
    })
    const child = client.child()
    const grandchild = child.child()

    child.on('response', (err, meta) => {
      t.error(err)
    })

    grandchild.info((err, res) => {
      t.error(err)
    })
  })

  t.end()
})

test('Should create a child client (generateRequestId check)', t => {
  t.plan(6)

  function generateRequestId1 () {
    let id = 0
    return () => `trace-1-${id++}`
  }

  function generateRequestId2 () {
    let id = 0
    return () => `trace-2-${id++}`
  }

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection,
    generateRequestId: generateRequestId1()
  })
  const child = client.child({
    Connection: MockConnection,
    generateRequestId: generateRequestId2()
  })

  let count = 0
  client.on('request', (err, { meta }) => {
    t.error(err)
    t.equal(
      meta.request.id,
      count++ === 0 ? 'trace-1-0' : 'trace-2-0'
    )
  })

  client.info(err => {
    t.error(err)
    child.info(t.error)
  })
})

test('Should create a child client (name check)', t => {
  t.plan(8)

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection,
    name: 'parent'
  })
  const child = client.child({
    Connection: MockConnection,
    name: 'child'
  })

  t.equal(client.name, 'parent')
  t.equal(child.name, 'child')

  let count = 0
  client.on('request', (err, { meta }) => {
    t.error(err)
    t.equal(
      meta.name,
      count++ === 0 ? 'parent' : 'child'
    )
  })

  client.info(err => {
    t.error(err)
    child.info(t.error)
  })
})

test('Should create a child client (auth check)', t => {
  t.plan(4)

  let count = 0
  function handler (req, res) {
    if (count++ === 0) {
      t.match(req.headers, { authorization: 'Basic Zm9vOmJhcg==' })
    } else {
      t.match(req.headers, { authorization: 'ApiKey foobar' })
    }
    res.setHeader('Content-Type', 'application/json;utf=8')
    res.end(JSON.stringify({ hello: 'world' }))
  }

  buildServer(handler, ({ port }, server) => {
    const client = new Client({
      node: `http://localhost:${port}`,
      auth: {
        username: 'foo',
        password: 'bar'
      }
    })
    const child = client.child({
      auth: {
        apiKey: 'foobar'
      }
    })

    client.info((err, res) => {
      t.error(err)
      child.info((err, res) => {
        t.error(err)
        server.stop()
      })
    })
  })
})
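
Taken together, the tests above pin down the child-client contract: a child shares the parent's connection pool and event emitter but can carry its own headers, auth, timeouts, and name. A minimal sketch of that usage, mirroring the options exercised above (illustrative only, not part of the diff):

import { Client } from '../../index'

const client = new Client({
  node: 'http://localhost:9200',
  headers: { 'x-foo': 'bar' },
  name: 'parent'
})

// Requests made through the child carry its own headers, auth and name,
// while events such as 'response' still reach listeners on the parent.
const child = client.child({
  headers: { 'x-baz': 'faz' },
  auth: { apiKey: 'foobar' },
  requestTimeout: 25,
  name: 'child'
})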
File diff suppressed because it is too large

test/unit/client.test.ts (new file, 434 lines)
@@ -0,0 +1,434 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { test } from 'tap'
import { URL } from 'url'
import { connection } from '../utils'
import { Client, errors } from '../..'
import * as symbols from '@elastic/transport/lib/symbols'
import { BaseConnectionPool, CloudConnectionPool, WeightedConnectionPool } from '@elastic/transport'

let clientVersion: string = require('../../package.json').version // eslint-disable-line
if (clientVersion.includes('-')) {
  clientVersion = clientVersion.slice(0, clientVersion.indexOf('-')) + 'p'
}
let transportVersion: string = require('@elastic/transport/package.json').version // eslint-disable-line
if (transportVersion.includes('-')) {
  transportVersion = transportVersion.slice(0, transportVersion.indexOf('-')) + 'p'
}
const nodeVersion = process.versions.node

test('Create a client instance, single node as string', t => {
  const client = new Client({ node: 'http://localhost:9200' })
  t.ok(client.connectionPool instanceof WeightedConnectionPool)
  t.equal(client.connectionPool.size, 1)
  t.end()
})

test('Create a client instance, multi node as strings', t => {
  const client = new Client({ nodes: ['http://localhost:9200', 'http://localhost:9201'] })
  t.ok(client.connectionPool instanceof WeightedConnectionPool)
  t.equal(client.connectionPool.size, 2)
  t.end()
})

test('Create a client instance, single node as object', t => {
  const client = new Client({
    node: {
      url: new URL('http://localhost:9200')
    }
  })
  t.equal(client.connectionPool.size, 1)
  t.end()
})

test('Create a client instance, multi node as object', t => {
  const client = new Client({
    nodes: [{
      url: new URL('http://localhost:9200')
    }, {
      url: new URL('http://localhost:9201')
    }]
  })
  t.equal(client.connectionPool.size, 2)
  t.end()
})

test('Missing node(s)', t => {
  t.throws(() => new Client({}), errors.ConfigurationError)
  t.end()
})

test('Custom headers', t => {
  const client = new Client({
    node: 'http://localhost:9200',
    headers: { foo: 'bar' }
  })
  t.match(client.transport[symbols.kHeaders], { foo: 'bar' })
  t.end()
})

test('Basic auth', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.match(opts.headers, { authorization: 'Basic aGVsbG86d29ybGQ=' })
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection,
    auth: {
      username: 'hello',
      password: 'world'
    }
  })

  await client.transport.request({ method: 'GET', path: '/' })
})

test('Basic auth via url', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.match(opts.headers, { authorization: 'Basic aGVsbG86d29ybGQ=' })
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://hello:world@localhost:9200',
    Connection
  })

  await client.transport.request({ method: 'GET', path: '/' })
})

test('ApiKey as string', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.match(opts.headers, { authorization: 'ApiKey foobar' })
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection,
    auth: {
      apiKey: 'foobar'
    }
  })

  await client.transport.request({ method: 'GET', path: '/' })
})

test('ApiKey as object', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.match(opts.headers, { authorization: 'ApiKey Zm9vOmJhcg==' })
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection,
    auth: {
      apiKey: {
        id: 'foo',
        api_key: 'bar'
      }
    }
  })

  await client.transport.request({ method: 'GET', path: '/' })
})

test('Bearer auth', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.match(opts.headers, { authorization: 'Bearer token' })
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection,
    auth: {
      bearer: 'token'
    }
  })

  await client.transport.request({ method: 'GET', path: '/' })
})

test('Override authentication per request', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.match(opts.headers, { authorization: 'Basic foobar' })
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection,
    auth: {
      username: 'hello',
      password: 'world'
    }
  })

  await client.transport.request(
    { method: 'GET', path: '/' },
    { headers: { authorization: 'Basic foobar' } }
  )
})

test('Custom headers per request', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.match(opts.headers, {
        foo: 'bar',
        faz: 'bar'
      })
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection,
    headers: { foo: 'bar' }
  })

  await client.transport.request(
    { method: 'GET', path: '/' },
    { headers: { faz: 'bar' } }
  )
})

test('Close the client', async t => {
  t.plan(1)

  class MyConnectionPool extends BaseConnectionPool {
    async empty (): Promise<void> {
      t.pass('called')
    }
  }

  const client = new Client({
    node: 'http://localhost:9200',
    ConnectionPool: MyConnectionPool
  })

  await client.close()
})

test('Elastic Cloud config', t => {
  const client = new Client({
    cloud: {
      // 'localhost$abcd$'
      id: 'name:bG9jYWxob3N0JGFiY2Qk'
    },
    auth: {
      username: 'elastic',
      password: 'changeme'
    }
  })

  t.ok(client.connectionPool instanceof CloudConnectionPool)
  t.match(client.connectionPool.connections.find(c => c.id === 'https://abcd.localhost/'), {
    url: new URL('https://elastic:changeme@abcd.localhost'),
    id: 'https://abcd.localhost/',
    headers: {
      authorization: 'Basic ' + Buffer.from('elastic:changeme').toString('base64')
    },
    tls: { secureProtocol: 'TLSv1_2_method' }
  })

  t.end()
})

test('Override default Elastic Cloud options', t => {
  const client = new Client({
    cloud: {
      // 'localhost$abcd$efgh'
      id: 'name:bG9jYWxob3N0JGFiY2QkZWZnaA==',
    },
    auth: {
      username: 'elastic',
      password: 'changeme'
    },
    compression: false,
    tls: {
      secureProtocol: 'TLSv1_1_method'
    }
  })

  t.ok(client.connectionPool instanceof CloudConnectionPool)
  t.equal(client.transport[symbols.kCompression], false)
  t.same(client.connectionPool._tls, { secureProtocol: 'TLSv1_1_method' })

  t.end()
})

test('Configure opaqueIdPrefix', t => {
  const client = new Client({
    node: 'http://localhost:9200',
    opaqueIdPrefix: 'foobar'
  })

  t.equal(client.transport[symbols.kOpaqueIdPrefix], 'foobar')

  t.end()
})

test('name as string', t => {
  const client = new Client({
    node: 'http://localhost:9200',
    name: 'es-client'
  })

  t.equal(client.name, 'es-client')

  t.end()
})

test('name as symbol', t => {
  const s = Symbol()
  const client = new Client({
    node: 'http://localhost:9200',
    name: s
  })

  t.equal(client.name, s)

  t.end()
})

test('Meta header enabled by default', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.match(opts.headers, { 'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${transportVersion},hc=${nodeVersion}` })
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection
  })

  await client.transport.request({ method: 'GET', path: '/' })
})

test('Meta header disabled', async t => {
  t.plan(1)

  const Connection = connection.buildMockConnection({
    onRequest (opts) {
      t.notOk(opts.headers?.['x-elastic-client-meta'])
      return {
        statusCode: 200,
        body: { hello: 'world' }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection,
    enableMetaHeader: false
  })

  await client.transport.request({ method: 'GET', path: '/' })
})

test('caFingerprint', t => {
  const client = new Client({
    node: 'https://localhost:9200',
    caFingerprint: 'FO:OB:AR'
  })

  t.equal(client.connectionPool[symbols.kCaFingerprint], 'FO:OB:AR')
  t.end()
})

test('caFingerprint can\'t be configured over http / 1', t => {
  t.throws(() => new Client({
    node: 'http://localhost:9200',
    caFingerprint: 'FO:OB:AR'
  }),
  errors.ConfigurationError
  )
  t.end()
})

test('caFingerprint can\'t be configured over http / 2', t => {
  t.throws(() => new Client({
    nodes: ['http://localhost:9200'],
    caFingerprint: 'FO:OB:AR'
  }),
  errors.ConfigurationError
  )
  t.end()
})
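
The 'Elastic Cloud config' test above relies on the cloud.id encoding: the part after 'name:' is base64 for 'host$cluster_uuid$kibana_uuid', and the client targets https://<cluster_uuid>.<host>. A small sketch of that decoding, matching the test's expectations (illustrative only; the real logic lives in the client/transport):

// 'name:bG9jYWxob3N0JGFiY2Qk' -> the base64 part decodes to 'localhost$abcd$'
// -> node URL 'https://abcd.localhost'
function cloudIdToNodeUrl (cloudId: string): string {
  const encoded = cloudId.split(':')[1]
  const [host, clusterId] = Buffer.from(encoded, 'base64').toString().split('$')
  return `https://${clusterId}.${host}`
}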
@@ -1,48 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

const { test } = require('tap')
const { CloudConnectionPool } = require('../../lib/pool')
const Connection = require('../../lib/Connection')

test('Should expose a cloudConnection property', t => {
  const pool = new CloudConnectionPool({ Connection })
  pool.addConnection('http://localhost:9200/')
  t.ok(pool.cloudConnection instanceof Connection)
  t.end()
})

test('Get connection should always return cloudConnection', t => {
  const pool = new CloudConnectionPool({ Connection })
  const conn = pool.addConnection('http://localhost:9200/')
  t.same(pool.getConnection(), conn)
  t.end()
})

test('pool.empty should reset cloudConnection', t => {
  const pool = new CloudConnectionPool({ Connection })
  pool.addConnection('http://localhost:9200/')
  t.ok(pool.cloudConnection instanceof Connection)
  pool.empty(() => {
    t.equal(pool.cloudConnection, null)
    t.end()
  })
})
@@ -1,801 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

const { test } = require('tap')
const { URL } = require('url')
const ConnectionPool = require('../../lib/pool/ConnectionPool')
const Connection = require('../../lib/Connection')
const { defaultNodeFilter, roundRobinSelector } = require('../../lib/Transport').internals
const { connection: { MockConnection, MockConnectionTimeout } } = require('../utils')

test('API', t => {
  t.test('addConnection', t => {
    const pool = new ConnectionPool({ Connection })
    const href = 'http://localhost:9200/'
    pool.addConnection(href)
    t.ok(pool.connections.find(c => c.id === href) instanceof Connection)
    t.equal(pool.connections.find(c => c.id === href).status, Connection.statuses.ALIVE)
    t.same(pool.dead, [])
    t.end()
  })

  t.test('addConnection should throw with two connections with the same id', t => {
    const pool = new ConnectionPool({ Connection })
    const href = 'http://localhost:9200/'
    pool.addConnection(href)
    try {
      pool.addConnection(href)
      t.fail('Should throw')
    } catch (err) {
      t.equal(err.message, `Connection with id '${href}' is already present`)
    }
    t.end()
  })

  t.test('addConnection should handle not-friendly url parameters for user and password', t => {
    const pool = new ConnectionPool({ Connection })
    const href = 'http://us"er:p@assword@localhost:9200/'
    pool.addConnection(href)
    const conn = pool.getConnection()
    t.equal(conn.url.username, 'us%22er')
    t.equal(conn.url.password, 'p%40assword')
    t.match(conn.headers, {
      authorization: 'Basic ' + Buffer.from('us"er:p@assword').toString('base64')
    })
    t.end()
  })

  t.test('markDead', t => {
    const pool = new ConnectionPool({ Connection, sniffEnabled: true })
    const href = 'http://localhost:9200/'
    let connection = pool.addConnection(href)
    pool.markDead(connection)
    connection = pool.connections.find(c => c.id === href)
    t.equal(connection.deadCount, 1)
    t.ok(connection.resurrectTimeout > 0)
    t.same(pool.dead, [href])
    t.end()
  })

  t.test('markDead should sort the dead queue by deadTimeout', t => {
    const pool = new ConnectionPool({ Connection })
    const href1 = 'http://localhost:9200/1'
    const href2 = 'http://localhost:9200/2'
    const conn1 = pool.addConnection(href1)
    const conn2 = pool.addConnection(href2)
    pool.markDead(conn2)
    setTimeout(() => {
      pool.markDead(conn1)
      t.same(pool.dead, [href2, href1])
      t.end()
    }, 10)
  })

  t.test('markDead should ignore connections that no longer exists', t => {
    const pool = new ConnectionPool({ Connection, sniffEnabled: true })
    pool.addConnection('http://localhost:9200/')
    pool.markDead({ id: 'foo-bar' })
    t.same(pool.dead, [])
    t.end()
  })

  t.test('markAlive', t => {
    const pool = new ConnectionPool({ Connection, sniffEnabled: true })
    const href = 'http://localhost:9200/'
    let connection = pool.addConnection(href)
    pool.markDead(connection)
    pool.markAlive(connection)
    connection = pool.connections.find(c => c.id === href)
    t.equal(connection.deadCount, 0)
    t.equal(connection.resurrectTimeout, 0)
    t.equal(connection.status, Connection.statuses.ALIVE)
    t.same(pool.dead, [])
    t.end()
  })

  t.test('resurrect', t => {
    t.test('ping strategy', t => {
      t.test('alive', t => {
        const pool = new ConnectionPool({
          resurrectStrategy: 'ping',
          pingTimeout: 3000,
          Connection: MockConnection,
          sniffEnabled: true
        })
        const href = 'http://localhost:9200/'
        const connection = pool.addConnection(href)
        pool.markDead(connection)
        const opts = {
          now: Date.now() + 1000 * 60 * 3,
          requestId: 1,
          name: 'elasticsearch-js'
        }
        pool.resurrect(opts, (isAlive, connection) => {
          t.ok(isAlive)
          connection = pool.connections.find(c => c.id === connection.id)
          t.equal(connection.deadCount, 0)
          t.equal(connection.resurrectTimeout, 0)
          t.equal(connection.status, Connection.statuses.ALIVE)
          t.same(pool.dead, [])
          t.end()
        })
      })

      t.test('dead', t => {
        const pool = new ConnectionPool({
          resurrectStrategy: 'ping',
          pingTimeout: 3000,
          Connection: MockConnectionTimeout,
          sniffEnabled: true
        })
        const href = 'http://localhost:9200/'
        const connection = pool.addConnection(href)
        pool.markDead(connection)
        const opts = {
          now: Date.now() + 1000 * 60 * 3,
          requestId: 1,
          name: 'elasticsearch-js'
        }
        pool.resurrect(opts, (isAlive, connection) => {
          t.notOk(isAlive)
          connection = pool.connections.find(c => c.id === connection.id)
          t.equal(connection.deadCount, 2)
          t.ok(connection.resurrectTimeout > 0)
          t.equal(connection.status, Connection.statuses.DEAD)
          t.same(pool.dead, [href])
          t.end()
        })
      })

      t.end()
    })

    t.test('optimistic strategy', t => {
      const pool = new ConnectionPool({
        resurrectStrategy: 'optimistic',
        Connection,
        sniffEnabled: true
      })
      const href = 'http://localhost:9200/'
      const connection = pool.addConnection(href)
      pool.markDead(connection)
      const opts = {
        now: Date.now() + 1000 * 60 * 3,
        requestId: 1,
        name: 'elasticsearch-js'
      }
      pool.resurrect(opts, (isAlive, connection) => {
        t.ok(isAlive)
        connection = pool.connections.find(c => c.id === connection.id)
        t.equal(connection.deadCount, 1)
        t.ok(connection.resurrectTimeout > 0)
        t.equal(connection.status, Connection.statuses.ALIVE)
        t.same(pool.dead, [])
        t.end()
      })
    })

    t.test('none strategy', t => {
      const pool = new ConnectionPool({
        resurrectStrategy: 'none',
        Connection,
        sniffEnabled: true
      })
      const href = 'http://localhost:9200/'
      const connection = pool.addConnection(href)
      pool.markDead(connection)
      const opts = {
        now: Date.now() + 1000 * 60 * 3,
        requestId: 1,
        name: 'elasticsearch-js'
      }
      pool.resurrect(opts, (isAlive, connection) => {
        t.ok(isAlive === null)
        t.ok(connection === null)
        connection = pool.connections.find(c => c.id === href)
        t.equal(connection.deadCount, 1)
        t.ok(connection.resurrectTimeout > 0)
        t.equal(connection.status, Connection.statuses.DEAD)
        t.same(pool.dead, [href])
        t.end()
      })
    })

    t.end()
  })

  t.test('getConnection', t => {
    t.test('Should return a connection', t => {
      const pool = new ConnectionPool({ Connection })
      const href = 'http://localhost:9200/'
      pool.addConnection(href)
      t.ok(pool.getConnection() instanceof Connection)
      t.end()
    })

    t.test('filter option', t => {
      const pool = new ConnectionPool({ Connection })
      const href1 = 'http://localhost:9200/'
      const href2 = 'http://localhost:9200/other'
      pool.addConnection([href1, href2])

      const filter = node => node.id === href1
      t.equal(pool.getConnection({ filter }).id, href1)
      t.end()
    })

    t.test('filter should get Connection objects', t => {
      t.plan(2)
      const pool = new ConnectionPool({ Connection })
      const href1 = 'http://localhost:9200/'
      const href2 = 'http://localhost:9200/other'
      pool.addConnection([href1, href2])

      const filter = node => {
        t.ok(node instanceof Connection)
        return true
      }
      pool.getConnection({ filter })
    })

    t.test('filter should get alive connections', t => {
      t.plan(2)
      const pool = new ConnectionPool({ Connection })
      const href1 = 'http://localhost:9200/'
      const href2 = 'http://localhost:9200/other'
      const conn = pool.addConnection(href1)
      pool.addConnection([href2, `${href2}/stuff`])
      pool.markDead(conn)

      const filter = node => {
        t.equal(node.status, Connection.statuses.ALIVE)
        return true
      }
      pool.getConnection({ filter })
    })

    t.test('If all connections are marked as dead, getConnection should return a dead connection', t => {
      const pool = new ConnectionPool({ Connection })
      const href1 = 'http://localhost:9200/'
      const href2 = 'http://localhost:9200/other'
      const conn1 = pool.addConnection(href1)
      const conn2 = pool.addConnection(href2)
      pool.markDead(conn1)
      pool.markDead(conn2)
      const conn = pool.getConnection()
      t.ok(conn instanceof Connection)
      t.equal(conn.status, 'dead')
      t.end()
    })

    t.end()
  })

  t.test('removeConnection', t => {
    const pool = new ConnectionPool({ Connection })
    const href = 'http://localhost:9200/'
    const connection = pool.addConnection(href)
    t.ok(pool.getConnection() instanceof Connection)
    pool.removeConnection(connection)
    t.equal(pool.getConnection(), null)
    t.end()
  })

  t.test('empty', t => {
    const pool = new ConnectionPool({ Connection })
    pool.addConnection('http://localhost:9200/')
    pool.addConnection('http://localhost:9201/')
    pool.empty(() => {
      t.equal(pool.size, 0)
      t.same(pool.dead, [])
      t.end()
    })
  })

  t.test('urlToHost', t => {
    const pool = new ConnectionPool({ Connection })
    const url = 'http://localhost:9200'
    t.same(
      pool.urlToHost(url),
      { url: new URL(url) }
    )
    t.end()
  })

  t.test('nodesToHost', t => {
    t.test('publish_address as ip address (IPv4)', t => {
      const pool = new ConnectionPool({ Connection })
      const nodes = {
        a1: {
          http: {
            publish_address: '127.0.0.1:9200'
          },
          roles: ['master', 'data', 'ingest']
        },
        a2: {
          http: {
            publish_address: '127.0.0.1:9201'
          },
          roles: ['master', 'data', 'ingest']
        }
      }

      t.same(pool.nodesToHost(nodes, 'http:'), [{
        url: new URL('http://127.0.0.1:9200'),
        id: 'a1',
        roles: {
          master: true,
          data: true,
          ingest: true,
          ml: false
        }
      }, {
        url: new URL('http://127.0.0.1:9201'),
id: 'a2',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
}])
|
||||
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, '127.0.0.1:9200')
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[1].url.host, '127.0.0.1:9201')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('publish_address as ip address (IPv6)', t => {
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
const nodes = {
|
||||
a1: {
|
||||
http: {
|
||||
publish_address: '[::1]:9200'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest']
|
||||
},
|
||||
a2: {
|
||||
http: {
|
||||
publish_address: '[::1]:9201'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest']
|
||||
}
|
||||
}
|
||||
|
||||
t.same(pool.nodesToHost(nodes, 'http:'), [{
|
||||
url: new URL('http://[::1]:9200'),
|
||||
id: 'a1',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
}, {
|
||||
url: new URL('http://[::1]:9201'),
|
||||
id: 'a2',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
}])
|
||||
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, '[::1]:9200')
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[1].url.host, '[::1]:9201')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('publish_address as host/ip (IPv4)', t => {
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
const nodes = {
|
||||
a1: {
|
||||
http: {
|
||||
publish_address: 'example.com/127.0.0.1:9200'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest']
|
||||
},
|
||||
a2: {
|
||||
http: {
|
||||
publish_address: 'example.com/127.0.0.1:9201'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest']
|
||||
}
|
||||
}
|
||||
|
||||
t.same(pool.nodesToHost(nodes, 'http:'), [{
|
||||
url: new URL('http://example.com:9200'),
|
||||
id: 'a1',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
}, {
|
||||
url: new URL('http://example.com:9201'),
|
||||
id: 'a2',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
}])
|
||||
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, 'example.com:9200')
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[1].url.host, 'example.com:9201')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('publish_address as host/ip (IPv6)', t => {
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
const nodes = {
|
||||
a1: {
|
||||
http: {
|
||||
publish_address: 'example.com/[::1]:9200'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest']
|
||||
},
|
||||
a2: {
|
||||
http: {
|
||||
publish_address: 'example.com/[::1]:9201'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest']
|
||||
}
|
||||
}
|
||||
|
||||
t.same(pool.nodesToHost(nodes, 'http:'), [{
|
||||
url: new URL('http://example.com:9200'),
|
||||
id: 'a1',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
}, {
|
||||
url: new URL('http://example.com:9201'),
|
||||
id: 'a2',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
}
|
||||
}])
|
||||
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[0].url.host, 'example.com:9200')
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[1].url.host, 'example.com:9201')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Should use the configured protocol', t => {
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
const nodes = {
|
||||
a1: {
|
||||
http: {
|
||||
publish_address: 'example.com/127.0.0.1:9200'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest']
|
||||
},
|
||||
a2: {
|
||||
http: {
|
||||
publish_address: 'example.com/127.0.0.1:9201'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest']
|
||||
}
|
||||
}
|
||||
|
||||
t.equal(pool.nodesToHost(nodes, 'https:')[0].url.protocol, 'https:')
|
||||
t.equal(pool.nodesToHost(nodes, 'http:')[1].url.protocol, 'http:')
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('Should map roles', t => {
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
const nodes = {
|
||||
a1: {
|
||||
http: {
|
||||
publish_address: 'example.com:9200'
|
||||
},
|
||||
roles: ['master', 'data', 'ingest', 'ml']
|
||||
},
|
||||
a2: {
|
||||
http: {
|
||||
publish_address: 'example.com:9201'
|
||||
},
|
||||
roles: []
|
||||
}
|
||||
}
|
||||
t.same(pool.nodesToHost(nodes, 'http:'), [{
|
||||
url: new URL('http://example.com:9200'),
|
||||
id: 'a1',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: true
|
||||
}
|
||||
}, {
|
||||
url: new URL('http://example.com:9201'),
|
||||
id: 'a2',
|
||||
roles: {
|
||||
master: false,
|
||||
data: false,
|
||||
ingest: false,
|
||||
ml: false
|
||||
}
|
||||
}])
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
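// Illustrative sketch of the publish_address handling asserted above (an assumption
// about the implementation, not the actual nodesToHost code): when the address has
// the `hostname/ip:port` form, the hostname is combined with the port; otherwise the
// address is used as-is.
function parsePublishAddressExample (address, protocol) {
  if (address.includes('/')) {
    const [hostname, ipAndPort] = address.split('/')
    const port = ipAndPort.split(':').pop()
    return new URL(`${protocol}//${hostname}:${port}`)
  }
  return new URL(`${protocol}//${address}`)
}
// parsePublishAddressExample('example.com/127.0.0.1:9200', 'http:').host // 'example.com:9200'
// parsePublishAddressExample('[::1]:9200', 'http:').host                 // '[::1]:9200'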
|
||||
t.test('update', t => {
|
||||
t.test('Should not update existing connections', t => {
|
||||
t.plan(2)
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection([{
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true
|
||||
}
|
||||
}, {
|
||||
url: new URL('http://127.0.0.1:9201'),
|
||||
id: 'a2',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true
|
||||
}
|
||||
}])
|
||||
|
||||
pool.update([{
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: null
|
||||
}, {
|
||||
url: new URL('http://127.0.0.1:9201'),
|
||||
id: 'a2',
|
||||
roles: null
|
||||
}])
|
||||
|
||||
t.ok(pool.connections.find(c => c.id === 'a1').roles !== null)
|
||||
t.ok(pool.connections.find(c => c.id === 'a2').roles !== null)
|
||||
})
|
||||
|
||||
t.test('Should not update existing connections (mark alive)', t => {
|
||||
t.plan(5)
|
||||
class CustomConnectionPool extends ConnectionPool {
|
||||
markAlive (connection) {
|
||||
t.ok('called')
|
||||
super.markAlive(connection)
|
||||
}
|
||||
}
|
||||
const pool = new CustomConnectionPool({ Connection })
|
||||
const conn1 = pool.addConnection({
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true
|
||||
}
|
||||
})
|
||||
|
||||
const conn2 = pool.addConnection({
|
||||
url: new URL('http://127.0.0.1:9201'),
|
||||
id: 'a2',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true
|
||||
}
|
||||
})
|
||||
|
||||
pool.markDead(conn1)
|
||||
pool.markDead(conn2)
|
||||
|
||||
pool.update([{
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: null
|
||||
}, {
|
||||
url: new URL('http://127.0.0.1:9201'),
|
||||
id: 'a2',
|
||||
roles: null
|
||||
}])
|
||||
|
||||
t.ok(pool.connections.find(c => c.id === 'a1').roles !== null)
|
||||
t.ok(pool.connections.find(c => c.id === 'a2').roles !== null)
|
||||
})
|
||||
|
||||
t.test('Should not update existing connections (same url, different id)', t => {
|
||||
t.plan(3)
|
||||
class CustomConnectionPool extends ConnectionPool {
|
||||
markAlive (connection) {
|
||||
t.ok('called')
|
||||
super.markAlive(connection)
|
||||
}
|
||||
}
|
||||
const pool = new CustomConnectionPool({ Connection })
|
||||
pool.addConnection([{
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'http://127.0.0.1:9200/',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true
|
||||
}
|
||||
}])
|
||||
|
||||
pool.update([{
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: true
|
||||
}])
|
||||
|
||||
// roles will never be updated; we only use it to do
|
||||
// a dummy check to see if the connection has been updated
|
||||
t.same(pool.connections.find(c => c.id === 'a1').roles, {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true,
|
||||
ml: false
|
||||
})
|
||||
t.equal(pool.connections.find(c => c.id === 'http://127.0.0.1:9200/'), undefined)
|
||||
})
|
||||
|
||||
t.test('Add a new connection', t => {
|
||||
t.plan(2)
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection({
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: {
|
||||
master: true,
|
||||
data: true,
|
||||
ingest: true
|
||||
}
|
||||
})
|
||||
|
||||
pool.update([{
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: null
|
||||
}, {
|
||||
url: new URL('http://127.0.0.1:9201'),
|
||||
id: 'a2',
|
||||
roles: null
|
||||
}])
|
||||
|
||||
t.ok(pool.connections.find(c => c.id === 'a1').roles !== null)
|
||||
t.ok(pool.connections.find(c => c.id === 'a2'))
|
||||
})
|
||||
|
||||
t.test('Remove old connections', t => {
|
||||
t.plan(3)
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection({
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: null
|
||||
})
|
||||
|
||||
pool.update([{
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a2',
|
||||
roles: null
|
||||
}, {
|
||||
url: new URL('http://127.0.0.1:9201'),
|
||||
id: 'a3',
|
||||
roles: null
|
||||
}])
|
||||
|
||||
t.notOk(pool.connections.find(c => c.id === 'a1'))
|
||||
t.ok(pool.connections.find(c => c.id === 'a2'))
|
||||
t.ok(pool.connections.find(c => c.id === 'a3'))
|
||||
})
|
||||
|
||||
t.test('Remove old connections (markDead)', t => {
|
||||
t.plan(5)
|
||||
const pool = new ConnectionPool({ Connection, sniffEnabled: true })
|
||||
const conn = pool.addConnection({
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a1',
|
||||
roles: null
|
||||
})
|
||||
|
||||
pool.markDead(conn)
|
||||
t.same(pool.dead, ['a1'])
|
||||
|
||||
pool.update([{
|
||||
url: new URL('http://127.0.0.1:9200'),
|
||||
id: 'a2',
|
||||
roles: null
|
||||
}, {
|
||||
url: new URL('http://127.0.0.1:9201'),
|
||||
id: 'a3',
|
||||
roles: null
|
||||
}])
|
||||
|
||||
t.same(pool.dead, [])
|
||||
t.notOk(pool.connections.find(c => c.id === 'a1'))
|
||||
t.ok(pool.connections.find(c => c.id === 'a2'))
|
||||
t.ok(pool.connections.find(c => c.id === 'a3'))
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Node selector', t => {
|
||||
t.test('round-robin', t => {
|
||||
t.plan(1)
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection('http://localhost:9200/')
|
||||
t.ok(pool.getConnection({ selector: roundRobinSelector() }) instanceof Connection)
|
||||
})
|
||||
|
||||
t.test('random', t => {
|
||||
t.plan(1)
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection('http://localhost:9200/')
|
||||
t.ok(pool.getConnection({ selector: roundRobinSelector() }) instanceof Connection)
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Node filter', t => {
|
||||
t.test('default', t => {
|
||||
t.plan(1)
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection({ url: new URL('http://localhost:9200/') })
|
||||
t.ok(pool.getConnection({ filter: defaultNodeFilter }) instanceof Connection)
|
||||
})
|
||||
|
||||
t.test('Should filter master only nodes', t => {
|
||||
t.plan(1)
|
||||
const pool = new ConnectionPool({ Connection })
|
||||
pool.addConnection({
|
||||
url: new URL('http://localhost:9200/'),
|
||||
roles: {
|
||||
master: true,
|
||||
data: false,
|
||||
ingest: false,
|
||||
ml: false
|
||||
}
|
||||
})
|
||||
t.equal(pool.getConnection({ filter: defaultNodeFilter }), null)
|
||||
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
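// Illustrative sketch (an assumption about the implementation, not the actual
// defaultNodeFilter): the test above is consistent with a filter that skips
// dedicated master-only nodes.
function exampleNodeFilter (node) {
  if (node.roles == null) return true
  const { master, data, ingest, ml } = node.roles
  return !(master === true && data === false && ingest === false && ml === false)
}
// exampleNodeFilter({ roles: { master: true, data: false, ingest: false, ml: false } }) // false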
File diff suppressed because it is too large
@ -1,225 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint no-prototype-builtins: 0 */
|
||||
|
||||
const { test } = require('tap')
|
||||
const { errors } = require('../../index')
|
||||
|
||||
test('ElasticsearchClientError', t => {
|
||||
const err = new errors.ElasticsearchClientError()
|
||||
t.ok(err instanceof Error)
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('TimeoutError', t => {
|
||||
const err = new errors.TimeoutError()
|
||||
t.ok(err instanceof Error)
|
||||
t.ok(err instanceof errors.ElasticsearchClientError)
|
||||
t.ok(err.hasOwnProperty('meta'))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ConnectionError', t => {
|
||||
const err = new errors.ConnectionError()
|
||||
t.ok(err instanceof Error)
|
||||
t.ok(err instanceof errors.ElasticsearchClientError)
|
||||
t.ok(err.hasOwnProperty('meta'))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('NoLivingConnectionsError', t => {
|
||||
const err = new errors.NoLivingConnectionsError()
|
||||
t.ok(err instanceof Error)
|
||||
t.ok(err instanceof errors.ElasticsearchClientError)
|
||||
t.ok(err.hasOwnProperty('meta'))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('SerializationError', t => {
|
||||
const err = new errors.SerializationError()
|
||||
t.ok(err instanceof Error)
|
||||
t.ok(err instanceof errors.ElasticsearchClientError)
|
||||
t.notOk(err.hasOwnProperty('meta'))
|
||||
t.ok(err.hasOwnProperty('data'))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('DeserializationError', t => {
|
||||
const err = new errors.DeserializationError()
|
||||
t.ok(err instanceof Error)
|
||||
t.ok(err instanceof errors.ElasticsearchClientError)
|
||||
t.notOk(err.hasOwnProperty('meta'))
|
||||
t.ok(err.hasOwnProperty('data'))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ConfigurationError', t => {
|
||||
const err = new errors.ConfigurationError()
|
||||
t.ok(err instanceof Error)
|
||||
t.ok(err instanceof errors.ElasticsearchClientError)
|
||||
t.notOk(err.hasOwnProperty('meta'))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ResponseError', t => {
|
||||
const meta = {
|
||||
body: 1,
|
||||
statusCode: 1,
|
||||
headers: 1
|
||||
}
|
||||
const err = new errors.ResponseError(meta)
|
||||
t.ok(err instanceof Error)
|
||||
t.ok(err instanceof errors.ElasticsearchClientError)
|
||||
t.ok(err.hasOwnProperty('meta'))
|
||||
t.ok(err.body)
|
||||
t.ok(err.statusCode)
|
||||
t.ok(err.headers)
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('RequestAbortedError', t => {
|
||||
const err = new errors.RequestAbortedError()
|
||||
t.ok(err instanceof Error)
|
||||
t.ok(err instanceof errors.ElasticsearchClientError)
|
||||
t.ok(err.hasOwnProperty('meta'))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ResponseError with meaningful message / 1', t => {
|
||||
const meta = {
|
||||
body: {
|
||||
error: {
|
||||
root_cause: [
|
||||
{
|
||||
type: 'index_not_found_exception',
|
||||
reason: 'no such index [foo]',
|
||||
'resource.type': 'index_expression',
|
||||
'resource.id': 'foo',
|
||||
index_uuid: '_na_',
|
||||
index: 'foo'
|
||||
}
|
||||
],
|
||||
type: 'index_not_found_exception',
|
||||
reason: 'no such index [foo]',
|
||||
'resource.type': 'index_expression',
|
||||
'resource.id': 'foo',
|
||||
index_uuid: '_na_',
|
||||
index: 'foo'
|
||||
},
|
||||
status: 404
|
||||
},
|
||||
statusCode: 404,
|
||||
headers: {}
|
||||
}
|
||||
const err = new errors.ResponseError(meta)
|
||||
t.equal(err.message, 'index_not_found_exception: [index_not_found_exception] Reason: no such index [foo]')
|
||||
t.equal(err.toString(), JSON.stringify(meta.body))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ResponseError with meaningful message / 2', t => {
|
||||
const meta = {
|
||||
body: {
|
||||
error: {
|
||||
root_cause: [
|
||||
{
|
||||
type: 'index_not_found_exception',
|
||||
reason: 'no such index [foo]',
|
||||
'resource.type': 'index_expression',
|
||||
'resource.id': 'foo',
|
||||
index_uuid: '_na_',
|
||||
index: 'foo'
|
||||
},
|
||||
{
|
||||
type: 'nested_cause',
|
||||
reason: 'this is a nested cause',
|
||||
'resource.type': 'index_expression',
|
||||
'resource.id': 'foo',
|
||||
index_uuid: '_na_',
|
||||
index: 'foo'
|
||||
}
|
||||
],
|
||||
type: 'index_not_found_exception',
|
||||
reason: 'no such index [foo]',
|
||||
'resource.type': 'index_expression',
|
||||
'resource.id': 'foo',
|
||||
index_uuid: '_na_',
|
||||
index: 'foo'
|
||||
},
|
||||
status: 404
|
||||
},
|
||||
statusCode: 404,
|
||||
headers: {}
|
||||
}
|
||||
const err = new errors.ResponseError(meta)
|
||||
t.equal(err.message, 'index_not_found_exception: [index_not_found_exception] Reason: no such index [foo]; [nested_cause] Reason: this is a nested cause')
|
||||
t.equal(err.toString(), JSON.stringify(meta.body))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ResponseError with meaningful message / 3', t => {
|
||||
const meta = {
|
||||
body: {
|
||||
error: {
|
||||
type: 'index_not_found_exception',
|
||||
reason: 'no such index [foo]',
|
||||
'resource.type': 'index_expression',
|
||||
'resource.id': 'foo',
|
||||
index_uuid: '_na_',
|
||||
index: 'foo'
|
||||
},
|
||||
status: 404
|
||||
},
|
||||
statusCode: 404,
|
||||
headers: {}
|
||||
}
|
||||
const err = new errors.ResponseError(meta)
|
||||
t.equal(err.message, 'index_not_found_exception')
|
||||
t.equal(err.toString(), JSON.stringify(meta.body))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ResponseError with meaningful message when body is not json', t => {
|
||||
const meta = {
|
||||
statusCode: 400,
|
||||
body: '<html><body>error!</body></html>',
|
||||
headers: { 'content-type': 'text/html' }
|
||||
}
|
||||
const err = new errors.ResponseError(meta)
|
||||
t.equal(err.name, 'ResponseError')
|
||||
t.equal(err.message, '<html><body>error!</body></html>')
|
||||
t.equal(err.toString(), JSON.stringify(meta.body))
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('ResponseError with meaningful message when body is falsy', t => {
|
||||
const meta = {
|
||||
statusCode: 400,
|
||||
body: '',
|
||||
headers: { 'content-type': 'text/plain' }
|
||||
}
|
||||
const err = new errors.ResponseError(meta)
|
||||
t.equal(err.name, 'ResponseError')
|
||||
t.equal(err.message, 'Response Error')
|
||||
t.equal(err.toString(), JSON.stringify(meta.body))
|
||||
t.end()
|
||||
})
|
||||
@ -1,8 +0,0 @@
|
||||
import t from 'tap'
|
||||
import { Client } from '../../../index.mjs'
|
||||
|
||||
t.test('esm support', t => {
|
||||
t.plan(1)
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
t.equal(client.name, 'elasticsearch-js')
|
||||
})
|
||||
@ -1,19 +0,0 @@
|
||||
'use strict'
|
||||
|
||||
const t = require('tap')
|
||||
const semver = require('semver')
|
||||
|
||||
if (semver.lt(process.versions.node, '12.17.0')) {
|
||||
t.skip('Skip because Node version < 12.17.0')
|
||||
t.end()
|
||||
} else {
|
||||
// Node v8 throws a `SyntaxError: Unexpected token import`
|
||||
// even if this branch is never touched in the code,
|
||||
// by using `eval` we can avoid this issue.
|
||||
// eslint-disable-next-line
|
||||
new Function('module', 'return import(module)')('./index.mjs').catch((err) => {
|
||||
process.nextTick(() => {
|
||||
throw err
|
||||
})
|
||||
})
|
||||
}
|
||||
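// Usage note (illustrative, not part of the original file): on Node.js >= 12.17 the
// same check could call dynamic import() directly, because the parser accepts it:
// import('./index.mjs').catch((err) => {
//   process.nextTick(() => { throw err })
// })
// The `new Function('module', 'return import(module)')` indirection above exists only
// so that older Node.js parsers never see the `import` keyword.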
@ -1,297 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { test } = require('tap')
|
||||
const { events } = require('../../index')
|
||||
const { TimeoutError } = require('../../lib/errors')
|
||||
const {
|
||||
Client,
|
||||
connection: {
|
||||
MockConnection,
|
||||
MockConnectionTimeout
|
||||
}
|
||||
} = require('../utils')
|
||||
|
||||
test('Should emit a request event when a request is performed', t => {
|
||||
t.plan(3)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.on(events.REQUEST, (err, request) => {
|
||||
t.error(err)
|
||||
t.match(request, {
|
||||
body: null,
|
||||
statusCode: null,
|
||||
headers: null,
|
||||
warnings: null,
|
||||
meta: {
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
request: {
|
||||
params: {
|
||||
method: 'GET',
|
||||
path: '/test/_search',
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar'
|
||||
},
|
||||
options: {},
|
||||
id: 1
|
||||
},
|
||||
connection: {
|
||||
id: 'http://localhost:9200'
|
||||
},
|
||||
attempts: 0,
|
||||
aborted: false
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
})
|
||||
|
||||
test('Should emit a request event once when a request is performed', t => {
|
||||
t.plan(4)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.once(events.REQUEST, (err, request) => {
|
||||
t.error(err)
|
||||
t.match(request, {
|
||||
body: null,
|
||||
statusCode: null,
|
||||
headers: null,
|
||||
warnings: null,
|
||||
meta: {
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
request: {
|
||||
params: {
|
||||
method: 'GET',
|
||||
path: '/test/_search',
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar'
|
||||
},
|
||||
options: {},
|
||||
id: 1
|
||||
},
|
||||
connection: {
|
||||
id: 'http://localhost:9200'
|
||||
},
|
||||
attempts: 0,
|
||||
aborted: false
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
})
|
||||
|
||||
test('Remove an event', t => {
|
||||
t.plan(4)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.on(events.REQUEST, onRequest)
|
||||
function onRequest (err, request) {
|
||||
t.error(err)
|
||||
t.match(request, {
|
||||
body: null,
|
||||
statusCode: null,
|
||||
headers: null,
|
||||
warnings: null,
|
||||
meta: {
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
request: {
|
||||
params: {
|
||||
method: 'GET',
|
||||
path: '/test/_search',
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar'
|
||||
},
|
||||
options: {},
|
||||
id: 1
|
||||
},
|
||||
connection: {
|
||||
id: 'http://localhost:9200'
|
||||
},
|
||||
attempts: 0,
|
||||
aborted: false
|
||||
}
|
||||
})
|
||||
|
||||
client.off('request', onRequest)
|
||||
}
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
})
|
||||
|
||||
test('Should emit a response event in case of a successful response', t => {
|
||||
t.plan(3)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.on(events.RESPONSE, (err, request) => {
|
||||
t.error(err)
|
||||
t.match(request, {
|
||||
body: { hello: 'world' },
|
||||
statusCode: 200,
|
||||
headers: {
|
||||
'content-type': 'application/json;utf=8',
|
||||
connection: 'keep-alive'
|
||||
},
|
||||
warnings: null,
|
||||
meta: {
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
request: {
|
||||
params: {
|
||||
method: 'GET',
|
||||
path: '/test/_search',
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar'
|
||||
},
|
||||
options: {},
|
||||
id: 1
|
||||
},
|
||||
connection: {
|
||||
id: 'http://localhost:9200'
|
||||
},
|
||||
attempts: 0,
|
||||
aborted: false
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, (err, result) => {
|
||||
t.error(err)
|
||||
})
|
||||
})
|
||||
|
||||
test('Should emit a response event with the error set', t => {
|
||||
t.plan(3)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnectionTimeout,
|
||||
maxRetries: 0
|
||||
})
|
||||
|
||||
client.on(events.RESPONSE, (err, request) => {
|
||||
t.ok(err instanceof TimeoutError)
|
||||
t.match(request, {
|
||||
body: null,
|
||||
statusCode: null,
|
||||
headers: null,
|
||||
warnings: null,
|
||||
meta: {
|
||||
context: null,
|
||||
name: 'elasticsearch-js',
|
||||
request: {
|
||||
params: {
|
||||
method: 'GET',
|
||||
path: '/test/_search',
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar'
|
||||
},
|
||||
options: {
|
||||
requestTimeout: 500
|
||||
},
|
||||
id: 1
|
||||
},
|
||||
connection: {
|
||||
id: 'http://localhost:9200'
|
||||
},
|
||||
attempts: 0,
|
||||
aborted: false
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'test',
|
||||
q: 'foo:bar'
|
||||
}, {
|
||||
requestTimeout: 500
|
||||
}, (err, result) => {
|
||||
t.ok(err instanceof TimeoutError)
|
||||
})
|
||||
})
|
||||
|
||||
test('Emit event', t => {
|
||||
t.plan(2)
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
|
||||
client.on(events.REQUEST, (err, request) => {
|
||||
t.error(err)
|
||||
t.same(request, { hello: 'world' })
|
||||
})
|
||||
|
||||
client.emit(events.REQUEST, null, { hello: 'world' })
|
||||
})
|
||||
@ -17,19 +17,23 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
import * as http from 'http'
|
||||
import { createReadStream } from 'fs'
|
||||
import { join } from 'path'
|
||||
import split from 'split2'
|
||||
import FakeTimers from '@sinonjs/fake-timers'
|
||||
import { test } from 'tap'
|
||||
import { Client, errors } from '../../../'
|
||||
import { buildServer, connection } from '../../utils'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const FakeTimers = require('@sinonjs/fake-timers')
|
||||
const { test } = require('tap')
|
||||
const { errors } = require('../../../')
|
||||
const { Client, buildServer, connection } = require('../../utils')
|
||||
let clientVersion = require('../../../package.json').version
|
||||
let clientVersion: string = require('../../../package.json').version // eslint-disable-line
|
||||
if (clientVersion.includes('-')) {
|
||||
clientVersion = clientVersion.slice(0, clientVersion.indexOf('-')) + 'p'
|
||||
}
|
||||
let transportVersion: string = require('@elastic/transport/package.json').version // eslint-disable-line
|
||||
if (transportVersion.includes('-')) {
|
||||
transportVersion = transportVersion.slice(0, transportVersion.indexOf('-')) + 'p'
|
||||
}
|
||||
const nodeVersion = process.versions.node
|
||||
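// Illustrative note (not part of the original file): the assertions below expect an
// x-elastic-client-meta header built from the sanitized versions computed above, e.g.
// const expectedMeta = `es=${clientVersion},js=${nodeVersion},t=${transportVersion},hc=${nodeVersion},h=bp`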
|
||||
const dataset = [
|
||||
@ -46,9 +50,10 @@ test('bulk index', t => {
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=bp`
|
||||
'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8',
|
||||
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${transportVersion},hc=${nodeVersion},h=bp`
|
||||
})
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -90,10 +95,11 @@ test('bulk index', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
t.notMatch(params.headers, {
|
||||
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=bp`
|
||||
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${transportVersion},hc=${nodeVersion},h=bp`
|
||||
})
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -135,7 +141,8 @@ test('bulk index', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
t.equal(params.body.split('\n').filter(Boolean).length, 6)
|
||||
return { body: { errors: false, items: new Array(3).fill({}) } }
|
||||
}
|
||||
@ -179,7 +186,8 @@ test('bulk index', t => {
|
||||
return { body: { acknowledged: true } }
|
||||
} else {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -224,7 +232,8 @@ test('bulk index', t => {
|
||||
return { body: { acknowledged: true } }
|
||||
} else {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -265,7 +274,8 @@ test('bulk index', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test', _id: count } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -286,7 +296,7 @@ test('bulk index', t => {
|
||||
return {
|
||||
index: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
_id: String(id++)
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -307,9 +317,9 @@ test('bulk index', t => {
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (retry)', async t => {
|
||||
async function handler (req, res) {
|
||||
async function handler (req: http.IncomingMessage, res: http.ServerResponse) {
|
||||
t.equal(req.url, '/_bulk')
|
||||
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(req.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
|
||||
let body = ''
|
||||
req.setEncoding('utf8')
|
||||
@ -376,7 +386,7 @@ test('bulk index', t => {
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (retry a single document from batch)', async t => {
|
||||
function handler (req, res) {
|
||||
function handler (req: http.IncomingMessage, res: http.ServerResponse) {
|
||||
res.setHeader('content-type', 'application/json')
|
||||
res.end(JSON.stringify({
|
||||
took: 0,
|
||||
@ -425,9 +435,9 @@ test('bulk index', t => {
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (failure)', async t => {
|
||||
async function handler (req, res) {
|
||||
async function handler (req: http.IncomingMessage, res: http.ServerResponse) {
|
||||
t.equal(req.url, '/_bulk')
|
||||
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(req.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
|
||||
let body = ''
|
||||
req.setEncoding('utf8')
|
||||
@ -524,7 +534,7 @@ test('bulk index', t => {
|
||||
try {
|
||||
await b
|
||||
t.fail('Should throw')
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
t.ok(err instanceof errors.ResponseError)
|
||||
}
|
||||
})
|
||||
@ -560,15 +570,15 @@ test('bulk index', t => {
|
||||
try {
|
||||
await b
|
||||
t.fail('Should throw')
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
t.ok(err instanceof errors.ResponseError)
|
||||
}
|
||||
})
|
||||
|
||||
t.test('Should abort a bulk request', async t => {
|
||||
async function handler (req, res) {
|
||||
async function handler (req: http.IncomingMessage, res: http.ServerResponse) {
|
||||
t.equal(req.url, '/_bulk')
|
||||
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(req.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
|
||||
let body = ''
|
||||
req.setEncoding('utf8')
|
||||
@ -646,6 +656,7 @@ test('bulk index', t => {
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 1,
|
||||
concurrency: 1,
|
||||
// @ts-expect-error
|
||||
onDocument (doc) {
|
||||
return {
|
||||
foo: { _index: 'test' }
|
||||
@ -667,7 +678,8 @@ test('bulk index', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test', _id: count } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -690,7 +702,7 @@ test('bulk index', t => {
|
||||
return {
|
||||
index: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
_id: String(id++)
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -719,7 +731,8 @@ test('bulk index', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -775,7 +788,8 @@ test('bulk create', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { create: { _index: 'test', _id: count } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -796,7 +810,7 @@ test('bulk create', t => {
|
||||
return {
|
||||
create: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
_id: String(id++)
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -824,7 +838,8 @@ test('bulk update', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { update: { _index: 'test', _id: count } })
|
||||
t.same(JSON.parse(payload), { doc: dataset[count++], doc_as_upsert: true })
|
||||
@ -845,7 +860,7 @@ test('bulk update', t => {
|
||||
return [{
|
||||
update: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
_id: String(id++)
|
||||
}
|
||||
}, {
|
||||
doc_as_upsert: true
|
||||
@ -872,7 +887,8 @@ test('bulk update', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { update: { _index: 'test', _id: count } })
|
||||
t.same(JSON.parse(payload), { doc: dataset[count++] })
|
||||
@ -893,9 +909,9 @@ test('bulk update', t => {
|
||||
return [{
|
||||
update: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
_id: String(id++)
|
||||
}
|
||||
}]
|
||||
}, {}]
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
@ -917,11 +933,12 @@ test('bulk update', t => {
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { update: { _index: 'test', _id: count } })
|
||||
t.deepEqual(JSON.parse(payload), { doc: dataset[count++], doc_as_upsert: true })
|
||||
t.same(JSON.parse(action), { update: { _index: 'test', _id: count } })
|
||||
t.same(JSON.parse(payload), { doc: dataset[count++], doc_as_upsert: true })
|
||||
return { body: { errors: false, items: [{ update: { result: 'noop' } }] } }
|
||||
}
|
||||
})
|
||||
@ -939,7 +956,7 @@ test('bulk update', t => {
|
||||
return [{
|
||||
update: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
_id: String(id++)
|
||||
}
|
||||
}, {
|
||||
doc_as_upsert: true
|
||||
@ -971,7 +988,8 @@ test('bulk delete', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
t.same(JSON.parse(params.body), { delete: { _index: 'test', _id: count++ } })
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
@ -990,7 +1008,7 @@ test('bulk delete', t => {
|
||||
return {
|
||||
delete: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
_id: String(id++)
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -1011,9 +1029,9 @@ test('bulk delete', t => {
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (failure)', async t => {
|
||||
async function handler (req, res) {
|
||||
async function handler (req: http.IncomingMessage, res: http.ServerResponse) {
|
||||
t.equal(req.url, '/_bulk')
|
||||
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(req.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
|
||||
let body = ''
|
||||
req.setEncoding('utf8')
|
||||
@ -1023,7 +1041,7 @@ test('bulk delete', t => {
|
||||
|
||||
res.setHeader('content-type', 'application/json')
|
||||
|
||||
if (JSON.parse(body).delete._id === 1) {
|
||||
if (JSON.parse(body).delete._id === '1') {
|
||||
res.end(JSON.stringify({
|
||||
took: 0,
|
||||
errors: true,
|
||||
@ -1055,7 +1073,7 @@ test('bulk delete', t => {
|
||||
return {
|
||||
delete: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
_id: String(id++)
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -1094,7 +1112,7 @@ test('transport options', t => {
|
||||
|
||||
if (params.path === '/_bulk') {
|
||||
t.match(params.headers, {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8',
|
||||
foo: 'bar'
|
||||
})
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
@ -1152,6 +1170,7 @@ test('errors', t => {
|
||||
})
|
||||
try {
|
||||
await client.helpers.bulk({
|
||||
// @ts-expect-error
|
||||
datasource: 'hello',
|
||||
onDocument (doc) {
|
||||
return {
|
||||
@ -1159,7 +1178,7 @@ test('errors', t => {
|
||||
}
|
||||
}
|
||||
})
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, 'bulk helper: the datasource must be an array or a buffer or a readable stream or an async generator')
|
||||
}
|
||||
@ -1170,6 +1189,7 @@ test('errors', t => {
|
||||
node: 'http://localhost:9200'
|
||||
})
|
||||
try {
|
||||
// @ts-expect-error
|
||||
await client.helpers.bulk({
|
||||
onDocument (doc) {
|
||||
return {
|
||||
@ -1177,7 +1197,7 @@ test('errors', t => {
|
||||
}
|
||||
}
|
||||
})
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, 'bulk helper: the datasource is required')
|
||||
}
|
||||
@ -1188,10 +1208,11 @@ test('errors', t => {
|
||||
node: 'http://localhost:9200'
|
||||
})
|
||||
try {
|
||||
// @ts-expect-error
|
||||
await client.helpers.bulk({
|
||||
datasource: dataset.slice()
|
||||
})
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
t.ok(err instanceof errors.ConfigurationError)
|
||||
t.equal(err.message, 'bulk helper: the onDocument callback is required')
|
||||
}
|
||||
@ -1209,7 +1230,8 @@ test('Flush interval', t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -1261,7 +1283,8 @@ test('Flush interval', t => {
|
||||
onRequest (params) {
|
||||
t.ok(count < 2)
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'content-type': 'application/x-ndjson' })
|
||||
t.match(params.headers, { 'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8' })
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.same(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
@ -1282,6 +1305,7 @@ test('Flush interval', t => {
|
||||
// Needed, otherwise in Node.js 10
|
||||
// the second request will never be sent
|
||||
await Promise.resolve()
|
||||
// @ts-ignore
|
||||
b.abort()
|
||||
}
|
||||
yield chunk
|
||||
@ -1316,14 +1340,15 @@ test('Flush interval', t => {
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.equal(params.path, '/_bulk')
|
||||
t.match(params.headers, {
|
||||
'content-type': 'application/x-ndjson',
|
||||
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=bp`
|
||||
'content-type': 'application/vnd.elasticsearch+x-ndjson; compatible-with=8',
|
||||
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${transportVersion},hc=${nodeVersion},h=bp`
|
||||
})
|
||||
// @ts-expect-error
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.deepEqual(JSON.parse(payload), dataset[count++])
|
||||
t.same(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.same(JSON.parse(payload), dataset[count++])
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
})
|
||||
@ -17,12 +17,10 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { test } = require('tap')
|
||||
const { errors } = require('../../../')
|
||||
const { Client, connection } = require('../../utils')
|
||||
const FakeTimers = require('@sinonjs/fake-timers')
|
||||
import { test } from 'tap'
|
||||
import { Client, errors } from '../../../'
|
||||
import { connection } from '../../utils'
|
||||
import FakeTimers from '@sinonjs/fake-timers'
|
||||
|
||||
test('Basic', async t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
@ -77,7 +75,7 @@ test('Basic', async t => {
|
||||
})
|
||||
|
||||
test('Multiple searches (inside async iterator)', t => {
|
||||
t.plan(6)
|
||||
t.plan(4)
|
||||
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
@ -114,51 +112,53 @@ test('Multiple searches (inside async iterator)', t => {
|
||||
|
||||
const m = client.helpers.msearch({ operations: 2 })
|
||||
|
||||
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
|
||||
t.error(err)
|
||||
t.same(result.body, {
|
||||
status: 200,
|
||||
hits: {
|
||||
hits: [
|
||||
{ _source: { one: 'one' } },
|
||||
{ _source: { two: 'two' } },
|
||||
{ _source: { three: 'three' } }
|
||||
]
|
||||
}
|
||||
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
|
||||
.then(result => {
|
||||
t.same(result.body, {
|
||||
status: 200,
|
||||
hits: {
|
||||
hits: [
|
||||
{ _source: { one: 'one' } },
|
||||
{ _source: { two: 'two' } },
|
||||
{ _source: { three: 'three' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.same(result.documents, [
|
||||
{ one: 'one' },
|
||||
{ two: 'two' },
|
||||
{ three: 'three' }
|
||||
])
|
||||
})
|
||||
.catch(t.error)
|
||||
|
||||
t.same(result.documents, [
|
||||
{ one: 'one' },
|
||||
{ two: 'two' },
|
||||
{ three: 'three' }
|
||||
])
|
||||
})
|
||||
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
|
||||
.then(result => {
|
||||
t.same(result.body, {
|
||||
status: 200,
|
||||
hits: {
|
||||
hits: [
|
||||
{ _source: { four: 'four' } },
|
||||
{ _source: { five: 'five' } },
|
||||
{ _source: { six: 'six' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
|
||||
t.error(err)
|
||||
t.same(result.body, {
|
||||
status: 200,
|
||||
hits: {
|
||||
hits: [
|
||||
{ _source: { four: 'four' } },
|
||||
{ _source: { five: 'five' } },
|
||||
{ _source: { six: 'six' } }
|
||||
]
|
||||
}
|
||||
t.same(result.documents, [
|
||||
{ four: 'four' },
|
||||
{ five: 'five' },
|
||||
{ six: 'six' }
|
||||
])
|
||||
})
|
||||
|
||||
t.same(result.documents, [
|
||||
{ four: 'four' },
|
||||
{ five: 'five' },
|
||||
{ six: 'six' }
|
||||
])
|
||||
})
|
||||
.catch(t.error)
|
||||
|
||||
t.teardown(() => m.stop())
|
||||
})
|
||||
|
||||
test('Multiple searches (async iterator exits)', t => {
|
||||
t.plan(6)
|
||||
t.plan(4)
|
||||
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
@ -195,45 +195,47 @@ test('Multiple searches (async iterator exits)', t => {
|
||||
|
||||
const m = client.helpers.msearch()
|
||||
|
||||
m.search({ index: 'test' }, { query: {} }, (err, result) => {
|
||||
t.error(err)
|
||||
t.same(result.body, {
|
||||
status: 200,
|
||||
hits: {
|
||||
hits: [
|
||||
{ _source: { one: 'one' } },
|
||||
{ _source: { two: 'two' } },
|
||||
{ _source: { three: 'three' } }
|
||||
]
|
||||
}
|
||||
m.search({ index: 'test' }, { query: {} })
|
||||
.then(result => {
|
||||
t.same(result.body, {
|
||||
status: 200,
|
||||
hits: {
|
||||
hits: [
|
||||
{ _source: { one: 'one' } },
|
||||
{ _source: { two: 'two' } },
|
||||
{ _source: { three: 'three' } }
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
t.same(result.documents, [
|
||||
{ one: 'one' },
|
||||
{ two: 'two' },
|
||||
{ three: 'three' }
|
||||
])
|
||||
})
|
||||
.catch(t.error)
|
||||
|
||||
t.same(result.documents, [
|
||||
{ one: 'one' },
|
||||
{ two: 'two' },
|
||||
{ three: 'three' }
|
||||
])
|
||||
})
|
||||
m.search({ index: 'test' }, { query: {} })
|
||||
.then(result => {
|
||||
t.same(result.body, {
|
||||
status: 200,
|
||||
hits: {
|
hits: [
{ _source: { four: 'four' } },
{ _source: { five: 'five' } },
{ _source: { six: 'six' } }
]
}
})

m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.same(result.body, {
status: 200,
hits: {
hits: [
{ _source: { four: 'four' } },
{ _source: { five: 'five' } },
{ _source: { six: 'six' } }
]
}
t.same(result.documents, [
{ four: 'four' },
{ five: 'five' },
{ six: 'six' }
])
})

t.same(result.documents, [
{ four: 'four' },
{ five: 'five' },
{ six: 'six' }
])
})
.catch(t.error)

setImmediate(() => m.stop())
})
@ -241,7 +243,7 @@ test('Multiple searches (async iterator exits)', t => {
test('Stop a msearch processor (promises)', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
return { body: {} }
}
})

@ -259,19 +261,19 @@ test('Stop a msearch processor (promises)', async t => {
{ index: 'test' },
{ query: { match: { foo: 'bar' } } }
)
} catch (err) {
} catch (err: any) {
t.equal(err.message, 'The msearch processor has been stopped')
}

t.teardown(() => m.stop())
})

test('Stop a msearch processor (callbacks)', t => {
test('Bad header', t => {
t.plan(1)

const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
return { body: {} }
}
})

@ -282,33 +284,7 @@ test('Stop a msearch processor (callbacks)', t => {

const m = client.helpers.msearch()

m.stop()

m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.equal(err.message, 'The msearch processor has been stopped')
})
})

test('Bad header', t => {
t.plan(2)

const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
}
})

const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})

const m = client.helpers.msearch()

m.search(null, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.equal(err.message, 'The header should be an object')
})

// @ts-expect-error
m.search(null, { query: { match: { foo: 'bar' } } })
.catch(err => {
t.equal(err.message, 'The header should be an object')
@ -318,11 +294,11 @@ test('Bad header', t => {
})

test('Bad body', t => {
t.plan(2)
t.plan(1)

const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
return { body: {} }
}
})

@ -333,10 +309,7 @@ test('Bad body', t => {

const m = client.helpers.msearch()

m.search({ index: 'test' }, null, (err, result) => {
t.equal(err.message, 'The body should be an object')
})

// @ts-expect-error
m.search({ index: 'test' }, null)
.catch(err => {
t.equal(err.message, 'The body should be an object')
@ -435,7 +408,7 @@ test('Single search errors', async t => {
{ index: 'test' },
{ query: { match: { foo: 'bar' } } }
)
} catch (err) {
} catch (err: any) {
t.ok(err instanceof errors.ResponseError)
}

@ -443,7 +416,7 @@ test('Single search errors', async t => {
})

test('Entire msearch fails', t => {
t.plan(4)
t.plan(2)

const MockConnection = connection.buildMockConnection({
onRequest (params) {
@ -464,15 +437,15 @@ test('Entire msearch fails', t => {

const m = client.helpers.msearch({ operations: 1 })

m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.ok(err instanceof errors.ResponseError)
t.same(result.documents, [])
})
m.search({ index: 'test' }, { query: {} })
.catch(err => {
t.ok(err instanceof errors.ResponseError)
})

m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.ok(err instanceof errors.ResponseError)
t.same(result.documents, [])
})
m.search({ index: 'test' }, { query: {} })
.catch(err => {
t.ok(err instanceof errors.ResponseError)
})

t.teardown(() => m.stop())
})
@ -482,7 +455,7 @@ test('Resolves the msearch helper', t => {

const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
return { body: {} }
}
})

@ -508,7 +481,7 @@ test('Stop the msearch helper with an error', t => {

const MockConnection = connection.buildMockConnection({
onRequest (params) {
return {}
return { body: {} }
}
})

@ -528,13 +501,14 @@ test('Stop the msearch helper with an error', t => {

m.catch(err => t.equal(err.message, 'kaboom'))

m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.equal(err.message, 'kaboom')
})
m.search({ index: 'test' }, { query: {} })
.catch(err => {
t.equal(err.message, 'kaboom')
})
})

test('Multiple searches (concurrency = 1)', t => {
t.plan(6)
t.plan(4)

const MockConnection = connection.buildMockConnection({
onRequest (params) {
@ -562,51 +536,53 @@ test('Multiple searches (concurrency = 1)', t => {

const m = client.helpers.msearch({ operations: 1, concurrency: 1 })

m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.error(err)
t.same(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
.then(result => {
t.same(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
})

t.same(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
.catch(t.error)

t.same(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
.then(result => {
t.same(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
})

m.search({ index: 'test' }, { query: {} }, (err, result) => {
t.error(err)
t.same(result.body, {
status: 200,
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
t.same(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})

t.same(result.documents, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
.catch(t.error)

t.teardown(() => m.stop())
})

test('Flush interval', t => {
t.plan(4)
t.plan(2)
const clock = FakeTimers.install({ toFake: ['setTimeout', 'clearTimeout'] })
t.teardown(() => clock.uninstall())

@ -645,15 +621,15 @@ test('Flush interval', t => {

const m = client.helpers.msearch()

m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.equal(result.documents.length, 3)
})
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
.then(result => {
t.equal(result.documents.length, 3)
})

m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.equal(result.documents.length, 3)
})
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
.then(result => {
t.equal(result.documents.length, 3)
})

setImmediate(clock.next)

@ -661,7 +637,7 @@ test('Flush interval', t => {
})

test('Flush interval - early stop', t => {
t.plan(3)
t.plan(2)

const MockConnection = connection.buildMockConnection({
onRequest (params) {
@ -689,15 +665,16 @@ test('Flush interval - early stop', t => {

const m = client.helpers.msearch()

m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.error(err)
t.equal(result.documents.length, 3)
})
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
.then(result => {
t.equal(result.documents.length, 3)
})

setImmediate(() => {
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
t.ok(err instanceof errors.ConfigurationError)
})
m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } })
.catch(err => {
t.ok(err instanceof errors.ConfigurationError)
})
})

m.stop()
@ -17,15 +17,18 @@
* under the License.
*/

'use strict'
import { test } from 'tap'
import { Client, errors } from '../../../'
import { connection } from '../../utils'

const { test } = require('tap')
const { errors } = require('../../../')
const { Client, connection } = require('../../utils')
let clientVersion = require('../../../package.json').version
let clientVersion: string = require('../../../package.json').version // eslint-disable-line
if (clientVersion.includes('-')) {
clientVersion = clientVersion.slice(0, clientVersion.indexOf('-')) + 'p'
}
let transportVersion: string = require('@elastic/transport/package.json').version // eslint-disable-line
if (transportVersion.includes('-')) {
transportVersion = transportVersion.slice(0, transportVersion.indexOf('-')) + 'p'
}
const nodeVersion = process.versions.node

test('Scroll search', async t => {
@ -33,12 +36,17 @@ test('Scroll search', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.match(params.headers, {
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=s`
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${transportVersion},hc=${nodeVersion},h=s`
})

count += 1
if (params.method === 'POST') {
t.equal(params.querystring, 'scroll=1m')
if (params.path === '/test/_search') {
t.equal(params.querystring, 'scroll=1m')
} else {
// @ts-expect-error
t.equal(JSON.parse(params.body).scroll, '1m')
}
}
if (count === 4) {
// final automated clear
@ -69,10 +77,11 @@ test('Scroll search', async t => {

const scrollSearch = client.helpers.scrollSearch({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
})

for await (const result of scrollSearch) {
// @ts-expect-error
t.equal(result.body.count, count)
t.equal(result.body._scroll_id, 'id')
}
@ -83,9 +92,10 @@ test('Clear a scroll search', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.notMatch(params.headers, {
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=s`
'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${transportVersion},hc=${nodeVersion},h=s`
})
if (params.method === 'DELETE') {
// @ts-expect-error
const body = JSON.parse(params.body)
t.equal(body.scroll_id, 'id')
}
@ -113,13 +123,14 @@ test('Clear a scroll search', async t => {

const scrollSearch = client.helpers.scrollSearch({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
})

for await (const result of scrollSearch) {
if (count === 2) {
t.fail('The scroll search should be cleared')
}
// @ts-expect-error
t.equal(result.body.count, count)
if (count === 1) {
await result.clear()
@ -166,13 +177,15 @@ test('Scroll search (retry)', async t => {

const scrollSearch = client.helpers.scrollSearch({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
}, {
wait: 10
})

for await (const result of scrollSearch) {
// @ts-expect-error
t.equal(result.body.count, count)
// @ts-expect-error
t.not(result.body.count, 1)
t.equal(result.body._scroll_id, 'id')
}
@ -197,17 +210,18 @@ test('Scroll search (retry throws and maxRetries)', async t => {

const scrollSearch = client.helpers.scrollSearch({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
}, {
wait: 10,
ignore: [404]
})

try {
// @ts-expect-error
for await (const result of scrollSearch) { // eslint-disable-line
t.fail('we should not be here')
}
} catch (err) {
} catch (err: any) {
t.ok(err instanceof errors.ResponseError)
t.equal(err.statusCode, 429)
t.equal(count, expectedAttempts)
@ -222,7 +236,14 @@ test('Scroll search (retry throws later)', async t => {
onRequest (params) {
count += 1
// filter_path should not be added if is not already present
t.equal(params.querystring, 'scroll=1m')
if (params.method === 'POST') {
if (params.path === '/test/_search') {
t.equal(params.querystring, 'scroll=1m')
} else {
// @ts-expect-error
t.equal(JSON.parse(params.body).scroll, '1m')
}
}
if (count > 1) {
return { body: {}, statusCode: 429 }
}
@ -251,16 +272,17 @@ test('Scroll search (retry throws later)', async t => {

const scrollSearch = client.helpers.scrollSearch({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
}, {
wait: 10
})

try {
for await (const result of scrollSearch) { // eslint-disable-line
// @ts-expect-error
t.equal(result.body.count, count)
}
} catch (err) {
} catch (err: any) {
t.ok(err instanceof errors.ResponseError)
t.equal(err.statusCode, 429)
t.equal(count, expectedAttempts)
@ -275,8 +297,7 @@ test('Scroll search documents', async t => {
t.equal(params.querystring, 'filter_path=hits.hits._source%2C_scroll_id&scroll=1m')
} else {
if (params.method !== 'DELETE') {
t.equal(params.querystring, 'scroll=1m')
t.equal(params.body, '{"scroll_id":"id"}')
t.equal(params.body, '{"scroll":"1m","scroll_id":"id"}')
}
}
return {
@ -304,7 +325,7 @@ test('Scroll search documents', async t => {

const scrollSearch = client.helpers.scrollDocuments({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
})

let n = 1
@ -337,17 +358,18 @@ test('Should not retry if maxRetries = 0', async t => {

const scrollSearch = client.helpers.scrollSearch({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
}, {
wait: 10,
ignore: [404]
})

try {
// @ts-expect-error
for await (const result of scrollSearch) { // eslint-disable-line
t.fail('we should not be here')
}
} catch (err) {
} catch (err: any) {
t.ok(err instanceof errors.ResponseError)
t.equal(err.statusCode, 429)
t.equal(count, expectedAttempts)
@ -359,10 +381,17 @@ test('Fix querystring for scroll search', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
if (count === 0) {
t.equal(params.querystring, 'size=1&scroll=1m')
t.equal(params.querystring, 'scroll=1m')
} else {
if (params.method !== 'DELETE') {
t.equal(params.querystring, 'scroll=1m')
if (params.method === 'POST') {
if (params.path === '/test/_search') {
t.equal(params.querystring, 'scroll=1m')
} else {
// @ts-expect-error
t.equal(JSON.parse(params.body).scroll, '1m')
}
}
}
}
return {
@ -388,7 +417,7 @@ test('Fix querystring for scroll search', async t => {
const scrollSearch = client.helpers.scrollSearch({
index: 'test',
size: 1,
body: { foo: 'bar' }
query: { match_all: {} }
})

for await (const response of scrollSearch) {
@ -17,10 +17,9 @@
* under the License.
*/

'use strict'

const { test } = require('tap')
const { Client, connection } = require('../../utils')
import { test } from 'tap'
import { Client } from '../../../'
import { connection } from '../../utils'

test('Search should have an additional documents property', async t => {
const MockConnection = connection.buildMockConnection({
@ -47,7 +46,7 @@ test('Search should have an additional documents property', async t => {

const result = await client.helpers.search({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
})
t.same(result, [
{ one: 'one' },
@ -71,7 +70,7 @@ test('kGetHits fallback', async t => {

const result = await client.helpers.search({
index: 'test',
body: { foo: 'bar' }
query: { match_all: {} }
})
t.same(result, [])
})
@ -102,46 +101,11 @@ test('Merge filter paths (snake_case)', async t => {
const result = await client.helpers.search({
index: 'test',
filter_path: 'foo',
body: { foo: 'bar' }
query: { match_all: {} }
})
t.same(result, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})

test('Merge filter paths (camelCase)', async t => {
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.equal(params.querystring, 'filter_path=foo%2Chits.hits._source')
return {
body: {
hits: {
hits: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}
}
}
})

const client = new Client({
node: 'http://localhost:9200',
Connection: MockConnection
})

const result = await client.helpers.search({
index: 'test',
filterPath: 'foo',
body: { foo: 'bar' }
})
t.same(result, [
{ one: 'one' },
{ two: 'two' },
{ three: 'three' }
])
})
})
@ -1,42 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

'use strict'

const { test } = require('tap')
const { roundRobinSelector, randomSelector } = require('../../lib/Transport').internals

test('RoundRobinSelector', t => {
const selector = roundRobinSelector()
const arr = [0, 1, 2, 3, 4, 5]

t.plan(arr.length + 1)
for (let i = 0; i <= arr.length; i++) {
t.equal(
selector(arr),
i === arr.length ? arr[0] : arr[i]
)
}
})

test('RandomSelector', t => {
t.plan(1)
const arr = [0, 1, 2, 3, 4, 5]
t.type(randomSelector(arr), 'number')
})
@ -1,231 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

'use strict'

const { test } = require('tap')
const { stringify } = require('querystring')
const Serializer = require('../../lib/Serializer')
const { SerializationError, DeserializationError } = require('../../lib/errors')

test('Basic', t => {
t.plan(2)
const s = new Serializer()
const obj = { hello: 'world' }
const json = JSON.stringify(obj)
t.equal(s.serialize(obj), json)
t.same(s.deserialize(json), obj)
})

test('ndserialize', t => {
t.plan(1)
const s = new Serializer()
const obj = [
{ hello: 'world' },
{ winter: 'is coming' },
{ you_know: 'for search' }
]
t.equal(
s.ndserialize(obj),
JSON.stringify(obj[0]) + '\n' +
JSON.stringify(obj[1]) + '\n' +
JSON.stringify(obj[2]) + '\n'
)
})

test('ndserialize (strings)', t => {
t.plan(1)
const s = new Serializer()
const obj = [
JSON.stringify({ hello: 'world' }),
JSON.stringify({ winter: 'is coming' }),
JSON.stringify({ you_know: 'for search' })
]
t.equal(
s.ndserialize(obj),
obj[0] + '\n' +
obj[1] + '\n' +
obj[2] + '\n'
)
})

test('qserialize', t => {
t.plan(1)
const s = new Serializer()
const obj = {
hello: 'world',
you_know: 'for search'
}

t.equal(
s.qserialize(obj),
stringify(obj)
)
})

test('qserialize (array)', t => {
t.plan(1)
const s = new Serializer()
const obj = {
hello: 'world',
arr: ['foo', 'bar']
}

t.equal(
s.qserialize(obj),
'hello=world&arr=foo%2Cbar'
)
})

test('qserialize (string)', t => {
t.plan(1)
const s = new Serializer()
const obj = {
hello: 'world',
you_know: 'for search'
}

t.equal(
s.qserialize(stringify(obj)),
stringify(obj)
)
})

test('qserialize (key with undefined value)', t => {
t.plan(1)
const s = new Serializer()
const obj = {
hello: 'world',
key: undefined,
foo: 'bar'
}

t.equal(
s.qserialize(obj),
'hello=world&foo=bar'
)
})

test('SerializationError', t => {
t.plan(1)
const s = new Serializer()
const obj = { hello: 'world' }
obj.o = obj
try {
s.serialize(obj)
t.fail('Should fail')
} catch (err) {
t.ok(err instanceof SerializationError)
}
})

test('SerializationError ndserialize', t => {
t.plan(1)
const s = new Serializer()
try {
s.ndserialize({ hello: 'world' })
t.fail('Should fail')
} catch (err) {
t.ok(err instanceof SerializationError)
}
})

test('DeserializationError', t => {
t.plan(1)
const s = new Serializer()
const json = '{"hello'
try {
s.deserialize(json)
t.fail('Should fail')
} catch (err) {
t.ok(err instanceof DeserializationError)
}
})

test('prototype poisoning protection', t => {
t.plan(2)
const s = new Serializer()
try {
s.deserialize('{"__proto__":{"foo":"bar"}}')
t.fail('Should fail')
} catch (err) {
t.ok(err instanceof DeserializationError)
}

try {
s.deserialize('{"constructor":{"prototype":{"foo":"bar"}}}')
t.fail('Should fail')
} catch (err) {
t.ok(err instanceof DeserializationError)
}
})

test('disable prototype poisoning protection', t => {
t.plan(2)
const s = new Serializer({ disablePrototypePoisoningProtection: true })
try {
s.deserialize('{"__proto__":{"foo":"bar"}}')
t.pass('Should not fail')
} catch (err) {
t.fail(err)
}

try {
s.deserialize('{"constructor":{"prototype":{"foo":"bar"}}}')
t.pass('Should not fail')
} catch (err) {
t.fail(err)
}
})

test('disable prototype poisoning protection only for proto', t => {
t.plan(2)
const s = new Serializer({ disablePrototypePoisoningProtection: 'proto' })
try {
s.deserialize('{"__proto__":{"foo":"bar"}}')
t.pass('Should not fail')
} catch (err) {
t.fail(err)
}

try {
s.deserialize('{"constructor":{"prototype":{"foo":"bar"}}}')
t.fail('Should fail')
} catch (err) {
t.ok(err instanceof DeserializationError)
}
})

test('disable prototype poisoning protection only for constructor', t => {
t.plan(2)
const s = new Serializer({ disablePrototypePoisoningProtection: 'constructor' })
try {
s.deserialize('{"__proto__":{"foo":"bar"}}')
t.fail('Should fail')
} catch (err) {
t.ok(err instanceof DeserializationError)
}

try {
s.deserialize('{"constructor":{"prototype":{"foo":"bar"}}}')
t.pass('Should not fail')
} catch (err) {
t.fail(err)
}
})