Client helpers (#1107)
* Added client helpers * Updated test * The search helper should return only the documents * Added code comments * Fixed bug * Updated test * Removed bulkSize and added flushBytes * Updated test * Added concurrency * Updated test * Added support for 429 handling in the scroll search helper * Updated test * Updated stats count * Updated test * Fix test * Use client maxRetries as default * Updated type definitions * Refactored bulk helper to be more consistent with the client api * Updated test * Improved error handling, added refreshOnCompletion option and forward additinal options to the bulk api * Updated type definitions * Updated test * Fixed test on Node v8 * Updated test * Added TODO * Updated docs * Added Node v8 note * Updated scripts * Removed useless files * Added helpers to integration test * Fix cli argument position * Moar fixes * Test run elasticsearch in github actions * Use master action version * Add vm.max_map_count step * Test new action setup * Added Configure sysctl limits step * Updated action to latest version * Don't run helpers integration test in jenkins * Run helpers integratino test also with Node v10 * Updated docs * Updated docs * Updated helpers type definitions * Added test for helpers type definitions * Added license header
This commit is contained in:
committed by
GitHub
parent
6c82a4967e
commit
d7836a16af
3
test/fixtures/small-dataset.ndjson
vendored
Normal file
3
test/fixtures/small-dataset.ndjson
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
{"user":"jon","age":23}
|
||||
{"user":"arya","age":18}
|
||||
{"user":"tyrion","age":39}
|
||||
5000
test/fixtures/stackoverflow.ndjson
vendored
Normal file
5000
test/fixtures/stackoverflow.ndjson
vendored
Normal file
File diff suppressed because one or more lines are too long
189
test/integration/helpers/bulk.test.js
Normal file
189
test/integration/helpers/bulk.test.js
Normal file
@ -0,0 +1,189 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const { test, beforeEach, afterEach } = require('tap')
|
||||
const { waitCluster } = require('../../utils')
|
||||
const { Client } = require('../../../')
|
||||
|
||||
const datasetPath = join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson')
|
||||
const INDEX = `test-helpers-${process.pid}`
|
||||
const client = new Client({
|
||||
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await waitCluster(client)
|
||||
await client.indices.create({ index: INDEX })
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.indices.delete({ index: INDEX }, { ignore: 404 })
|
||||
})
|
||||
|
||||
test('bulk index', async t => {
|
||||
const stream = createReadStream(datasetPath)
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split()),
|
||||
refreshOnCompletion: INDEX,
|
||||
onDrop (doc) {
|
||||
t.fail('It should not drop any document')
|
||||
},
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: INDEX }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 5000,
|
||||
successful: 5000,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
|
||||
const { body } = await client.count({ index: INDEX })
|
||||
t.match(body, { count: 5000 })
|
||||
})
|
||||
|
||||
test('bulk index with custom id', async t => {
|
||||
const stream = createReadStream(datasetPath)
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split(JSON.parse)),
|
||||
onDrop (doc) {
|
||||
t.fail('It should not drop any document')
|
||||
},
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: {
|
||||
_index: INDEX,
|
||||
_id: doc.id
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 5000,
|
||||
successful: 5000,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
|
||||
const { body } = await client.get({
|
||||
index: INDEX,
|
||||
id: '19273860' // id of document n° 4242
|
||||
})
|
||||
|
||||
t.strictEqual(body._index, INDEX)
|
||||
t.strictEqual(body._id, '19273860')
|
||||
t.strictEqual(body._source.id, '19273860')
|
||||
})
|
||||
|
||||
test('abort the operation on document drop', async t => {
|
||||
const stream = createReadStream(datasetPath)
|
||||
const b = client.helpers.bulk({
|
||||
datasource: stream.pipe(split(JSON.parse)),
|
||||
concurrency: 1,
|
||||
onDrop (doc) {
|
||||
t.strictEqual(doc.status, 400)
|
||||
t.strictEqual(doc.error.type, 'mapper_parsing_exception')
|
||||
t.strictEqual(doc.document.id, '45924372')
|
||||
b.abort()
|
||||
},
|
||||
onDocument (doc) {
|
||||
if (doc.id === '45924372') { // id of document n° 500
|
||||
// this will break the mapping
|
||||
doc.title = { foo: 'bar' }
|
||||
}
|
||||
return {
|
||||
index: {
|
||||
_index: INDEX,
|
||||
_id: doc.id
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const result = await b
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.strictEqual(result.total - 1, result.successful)
|
||||
t.match(result, {
|
||||
retry: 0,
|
||||
failed: 1,
|
||||
aborted: true
|
||||
})
|
||||
})
|
||||
|
||||
test('bulk delete', async t => {
|
||||
const indexResult = await client.helpers.bulk({
|
||||
datasource: createReadStream(datasetPath).pipe(split(JSON.parse)),
|
||||
refreshOnCompletion: true,
|
||||
onDrop (doc) {
|
||||
t.fail('It should not drop any document')
|
||||
},
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: {
|
||||
_index: INDEX,
|
||||
_id: doc.id
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(indexResult.time, 'number')
|
||||
t.type(indexResult.bytes, 'number')
|
||||
t.match(indexResult, {
|
||||
total: 5000,
|
||||
successful: 5000,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
|
||||
const { body: afterIndex } = await client.count({ index: INDEX })
|
||||
t.match(afterIndex, { count: 5000 })
|
||||
|
||||
const deleteResult = await client.helpers.bulk({
|
||||
datasource: createReadStream(datasetPath).pipe(split(JSON.parse)),
|
||||
refreshOnCompletion: true,
|
||||
onDrop (doc) {
|
||||
t.fail('It should not drop any document')
|
||||
},
|
||||
onDocument (doc) {
|
||||
return {
|
||||
delete: {
|
||||
_index: INDEX,
|
||||
_id: doc.id
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
t.type(deleteResult.time, 'number')
|
||||
t.type(deleteResult.bytes, 'number')
|
||||
t.match(deleteResult, {
|
||||
total: 5000,
|
||||
successful: 5000,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
|
||||
const { body: afterDelete } = await client.count({ index: INDEX })
|
||||
t.match(afterDelete, { count: 0 })
|
||||
})
|
||||
103
test/integration/helpers/scroll.test.js
Normal file
103
test/integration/helpers/scroll.test.js
Normal file
@ -0,0 +1,103 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const { test, beforeEach, afterEach } = require('tap')
|
||||
const { waitCluster } = require('../../utils')
|
||||
const { Client } = require('../../../')
|
||||
|
||||
const INDEX = `test-helpers-${process.pid}`
|
||||
const client = new Client({
|
||||
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await waitCluster(client)
|
||||
await client.indices.create({ index: INDEX })
|
||||
const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split()),
|
||||
refreshOnCompletion: true,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: INDEX }
|
||||
}
|
||||
}
|
||||
})
|
||||
if (result.failed > 0) {
|
||||
throw new Error('Failed bulk indexing docs')
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.indices.delete({ index: INDEX }, { ignore: 404 })
|
||||
})
|
||||
|
||||
test('search helper', async t => {
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: INDEX,
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
var count = 0
|
||||
for await (const search of scrollSearch) {
|
||||
count += 1
|
||||
for (const doc of search.documents) {
|
||||
t.true(doc.title.toLowerCase().includes('javascript'))
|
||||
}
|
||||
}
|
||||
t.strictEqual(count, 11)
|
||||
})
|
||||
|
||||
test('clear a scroll search', async t => {
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: INDEX,
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
var count = 0
|
||||
for await (const search of scrollSearch) {
|
||||
count += 1
|
||||
if (count === 2) {
|
||||
search.clear()
|
||||
}
|
||||
}
|
||||
t.strictEqual(count, 2)
|
||||
})
|
||||
|
||||
test('scroll documents', async t => {
|
||||
const scrollSearch = client.helpers.scrollDocuments({
|
||||
index: INDEX,
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
var count = 0
|
||||
for await (const doc of scrollSearch) {
|
||||
count += 1
|
||||
t.true(doc.title.toLowerCase().includes('javascript'))
|
||||
}
|
||||
t.strictEqual(count, 106)
|
||||
})
|
||||
56
test/integration/helpers/search.test.js
Normal file
56
test/integration/helpers/search.test.js
Normal file
@ -0,0 +1,56 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const { test, beforeEach, afterEach } = require('tap')
|
||||
const { waitCluster } = require('../../utils')
|
||||
const { Client } = require('../../../')
|
||||
|
||||
const INDEX = `test-helpers-${process.pid}`
|
||||
const client = new Client({
|
||||
node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
await waitCluster(client)
|
||||
await client.indices.create({ index: INDEX })
|
||||
const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: stream.pipe(split()),
|
||||
refreshOnCompletion: true,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: INDEX }
|
||||
}
|
||||
}
|
||||
})
|
||||
if (result.failed > 0) {
|
||||
throw new Error('Failed bulk indexing docs')
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.indices.delete({ index: INDEX }, { ignore: 404 })
|
||||
})
|
||||
|
||||
test('search helper', async t => {
|
||||
const results = await client.helpers.search({
|
||||
index: INDEX,
|
||||
body: {
|
||||
query: {
|
||||
match: {
|
||||
title: 'javascript'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
t.strictEqual(results.length, 10)
|
||||
for (const result of results) {
|
||||
t.true(result.title.toLowerCase().includes('javascript'))
|
||||
}
|
||||
})
|
||||
330
test/types/helpers.test-d.ts
Normal file
330
test/types/helpers.test-d.ts
Normal file
@ -0,0 +1,330 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
import { expectType, expectError, expectAssignable } from 'tsd'
|
||||
import { Client } from '../../'
|
||||
import { RequestBody, ResponseBody } from '../../lib/Transport'
|
||||
import {
|
||||
BulkHelper,
|
||||
BulkStats,
|
||||
BulkHelperOptions,
|
||||
ScrollSearchResponse
|
||||
} from '../../lib/Helpers'
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
})
|
||||
|
||||
/// .helpers.bulk
|
||||
|
||||
const b = client.helpers.bulk({
|
||||
datasource: [],
|
||||
onDocument (doc) {
|
||||
expectType<Record<string, any>>(doc)
|
||||
return { index: { _index: 'test' } }
|
||||
},
|
||||
flushBytes: 5000000,
|
||||
concurrency: 5,
|
||||
retries: 3,
|
||||
wait: 5000,
|
||||
onDrop (doc) {
|
||||
expectType<Record<string, any>>(doc)
|
||||
},
|
||||
refreshOnCompletion: true,
|
||||
pipeline: 'my-pipeline'
|
||||
})
|
||||
|
||||
expectType<BulkHelper<BulkStats>>(b)
|
||||
expectType<BulkHelper<BulkStats>>(b.abort())
|
||||
b.then(stats => expectType<BulkStats>(stats))
|
||||
|
||||
// body can't be provided
|
||||
expectError(
|
||||
client.helpers.bulk({
|
||||
datasource: [],
|
||||
onDocument (doc) {
|
||||
return { index: { _index: 'test' } }
|
||||
},
|
||||
body: []
|
||||
})
|
||||
)
|
||||
|
||||
// test onDocument actions
|
||||
// index
|
||||
{
|
||||
const options = {
|
||||
datasource: [],
|
||||
onDocument (doc: Record<string, any>) {
|
||||
return { index: { _index: 'test' } }
|
||||
}
|
||||
}
|
||||
expectAssignable<BulkHelperOptions>(options)
|
||||
}
|
||||
// create
|
||||
{
|
||||
const options = {
|
||||
datasource: [],
|
||||
onDocument (doc: Record<string, any>) {
|
||||
return { create: { _index: 'test' } }
|
||||
}
|
||||
}
|
||||
expectAssignable<BulkHelperOptions>(options)
|
||||
}
|
||||
// update
|
||||
{
|
||||
// without `:BulkHelperOptions` this test cannot pass
|
||||
// but if we write these options inline inside
|
||||
// a `.helper.bulk`, it works as expected
|
||||
const options: BulkHelperOptions = {
|
||||
datasource: [],
|
||||
onDocument (doc: Record<string, any>) {
|
||||
return [{ update: { _index: 'test' } }, doc]
|
||||
}
|
||||
}
|
||||
expectAssignable<BulkHelperOptions>(options)
|
||||
}
|
||||
// delete
|
||||
{
|
||||
const options = {
|
||||
datasource: [],
|
||||
onDocument (doc: Record<string, any>) {
|
||||
return { delete: { _index: 'test' } }
|
||||
}
|
||||
}
|
||||
expectAssignable<BulkHelperOptions>(options)
|
||||
}
|
||||
|
||||
/// .helpers.scrollSearch
|
||||
|
||||
// just search params
|
||||
{
|
||||
async function test () {
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for await (const response of scrollSearch) {
|
||||
expectAssignable<ScrollSearchResponse>(response)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// search params and options
|
||||
{
|
||||
async function test () {
|
||||
const scrollSearch = client.helpers.scrollSearch({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
}, { ignore: [404] })
|
||||
|
||||
for await (const response of scrollSearch) {
|
||||
expectAssignable<ScrollSearchResponse>(response)
|
||||
expectType<ResponseBody<Record<string, any>>>(response.body)
|
||||
expectType<unknown[]>(response.documents)
|
||||
expectType<unknown>(response.meta.context)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// with type defs
|
||||
{
|
||||
interface SearchBody {
|
||||
query: {
|
||||
match: { foo: string }
|
||||
}
|
||||
}
|
||||
|
||||
interface ShardsResponse {
|
||||
total: number;
|
||||
successful: number;
|
||||
failed: number;
|
||||
skipped: number;
|
||||
}
|
||||
|
||||
interface Explanation {
|
||||
value: number;
|
||||
description: string;
|
||||
details: Explanation[];
|
||||
}
|
||||
|
||||
interface SearchResponse<T> {
|
||||
took: number;
|
||||
timed_out: boolean;
|
||||
_scroll_id?: string;
|
||||
_shards: ShardsResponse;
|
||||
hits: {
|
||||
total: number;
|
||||
max_score: number;
|
||||
hits: Array<{
|
||||
_index: string;
|
||||
_type: string;
|
||||
_id: string;
|
||||
_score: number;
|
||||
_source: T;
|
||||
_version?: number;
|
||||
_explanation?: Explanation;
|
||||
fields?: any;
|
||||
highlight?: any;
|
||||
inner_hits?: any;
|
||||
matched_queries?: string[];
|
||||
sort?: string[];
|
||||
}>;
|
||||
};
|
||||
aggregations?: any;
|
||||
}
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
|
||||
async function test () {
|
||||
const scrollSearch = client.helpers.scrollSearch<SearchBody, Source, SearchResponse<Source>, string>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for await (const response of scrollSearch) {
|
||||
expectAssignable<ScrollSearchResponse>(response)
|
||||
expectType<SearchResponse<Source>>(response.body)
|
||||
expectType<Source[]>(response.documents)
|
||||
expectType<string>(response.meta.context)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// .helpers.scrollDocuments
|
||||
|
||||
// just search params
|
||||
{
|
||||
async function test () {
|
||||
const scrollDocuments = client.helpers.scrollDocuments({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for await (const document of scrollDocuments) {
|
||||
expectType<unknown>(document)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// search params and options
|
||||
{
|
||||
async function test () {
|
||||
const scrollDocuments = client.helpers.scrollDocuments({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
}, { ignore: [404] })
|
||||
|
||||
for await (const document of scrollDocuments) {
|
||||
expectType<unknown>(document)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// with type defs
|
||||
{
|
||||
interface SearchBody {
|
||||
query: {
|
||||
match: { foo: string }
|
||||
}
|
||||
}
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
|
||||
async function test () {
|
||||
const scrollDocuments = client.helpers.scrollDocuments<SearchBody, Source>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
for await (const document of scrollDocuments) {
|
||||
expectType<Source>(document)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// .helpers.search
|
||||
|
||||
// just search params
|
||||
{
|
||||
const p = client.helpers.search({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expectType<Promise<unknown[]>>(p)
|
||||
expectType<unknown[]>(await p)
|
||||
}
|
||||
|
||||
// search params and options
|
||||
{
|
||||
const p = client.helpers.search({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
}, { ignore: [404] })
|
||||
|
||||
expectType<Promise<unknown[]>>(p)
|
||||
expectType<unknown[]>(await p)
|
||||
}
|
||||
|
||||
// with type defs
|
||||
{
|
||||
interface SearchBody {
|
||||
query: {
|
||||
match: { foo: string }
|
||||
}
|
||||
}
|
||||
|
||||
interface Source {
|
||||
foo: string
|
||||
}
|
||||
|
||||
const p = client.helpers.search<SearchBody, Source>({
|
||||
index: 'test',
|
||||
body: {
|
||||
query: {
|
||||
match: { foo: 'bar' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
expectType<Promise<Source[]>>(p)
|
||||
expectType<Source[]>(await p)
|
||||
}
|
||||
813
test/unit/helpers/bulk.test.js
Normal file
813
test/unit/helpers/bulk.test.js
Normal file
@ -0,0 +1,813 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
const { createReadStream } = require('fs')
|
||||
const { join } = require('path')
|
||||
const split = require('split2')
|
||||
const semver = require('semver')
|
||||
const { test } = require('tap')
|
||||
const { Client, errors } = require('../../../')
|
||||
const { buildServer, connection } = require('../../utils')
|
||||
|
||||
const dataset = [
|
||||
{ user: 'jon', age: 23 },
|
||||
{ user: 'arya', age: 18 },
|
||||
{ user: 'tyrion', age: 39 }
|
||||
]
|
||||
|
||||
test('bulk index', t => {
|
||||
t.test('datasource as array', t => {
|
||||
t.test('Should perform a bulk request', async t => {
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'Content-Type': 'application/x-ndjson' })
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.deepEqual(JSON.parse(payload), dataset[count++])
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 1,
|
||||
concurrency: 1,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 3,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (with concurrency)', async t => {
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'Content-Type': 'application/x-ndjson' })
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
|
||||
t.deepEqual(JSON.parse(payload), dataset[count++])
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 1,
|
||||
concurrency: 3,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 3,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (high flush size)', async t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'Content-Type': 'application/x-ndjson' })
|
||||
t.strictEqual(params.body.split('\n').filter(Boolean).length, 6)
|
||||
return { body: { errors: false, items: new Array(3).fill({}) } }
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 5000000,
|
||||
concurrency: 1,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 3,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (custom action)', async t => {
|
||||
let count = 0
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
t.strictEqual(params.path, '/_bulk')
|
||||
t.match(params.headers, { 'Content-Type': 'application/x-ndjson' })
|
||||
const [action, payload] = params.body.split('\n')
|
||||
t.deepEqual(JSON.parse(action), { index: { _index: 'test', _id: count } })
|
||||
t.deepEqual(JSON.parse(payload), dataset[count++])
|
||||
return { body: { errors: false, items: [{}] } }
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
let id = 0
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 1,
|
||||
concurrency: 1,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: {
|
||||
_index: 'test',
|
||||
_id: id++
|
||||
}
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 3,
|
||||
retry: 0,
|
||||
failed: 0,
|
||||
aborted: false
|
||||
})
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (retry)', async t => {
|
||||
if (semver.lt(process.versions.node, '10.0.0')) {
|
||||
t.skip('This test will not pass on Node v8')
|
||||
return
|
||||
}
|
||||
async function handler (req, res) {
|
||||
t.strictEqual(req.url, '/_bulk')
|
||||
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
|
||||
|
||||
let body = ''
|
||||
req.setEncoding('utf8')
|
||||
for await (const chunk of req) {
|
||||
body += chunk
|
||||
}
|
||||
const [, payload] = body.split('\n')
|
||||
|
||||
res.setHeader('content-type', 'application/json')
|
||||
|
||||
if (JSON.parse(payload).user === 'arya') {
|
||||
res.end(JSON.stringify({
|
||||
took: 0,
|
||||
errors: true,
|
||||
items: [{
|
||||
index: {
|
||||
status: 429
|
||||
}
|
||||
}]
|
||||
}))
|
||||
} else {
|
||||
res.end(JSON.stringify({
|
||||
took: 0,
|
||||
errors: false,
|
||||
items: [{}]
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
const [{ port }, server] = await buildServer(handler)
|
||||
const client = new Client({ node: `http://localhost:${port}` })
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 1,
|
||||
concurrency: 1,
|
||||
wait: 10,
|
||||
retries: 1,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.deepEqual(doc, {
|
||||
status: 429,
|
||||
error: null,
|
||||
operation: { index: { _index: 'test' } },
|
||||
document: { user: 'arya', age: 18 },
|
||||
retried: true
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 2,
|
||||
retry: 2,
|
||||
failed: 1,
|
||||
aborted: false
|
||||
})
|
||||
server.stop()
|
||||
})
|
||||
|
||||
t.test('Should perform a bulk request (failure)', async t => {
|
||||
if (semver.lt(process.versions.node, '10.0.0')) {
|
||||
t.skip('This test will not pass on Node v8')
|
||||
return
|
||||
}
|
||||
async function handler (req, res) {
|
||||
t.strictEqual(req.url, '/_bulk')
|
||||
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
|
||||
|
||||
let body = ''
|
||||
req.setEncoding('utf8')
|
||||
for await (const chunk of req) {
|
||||
body += chunk
|
||||
}
|
||||
const [, payload] = body.split('\n')
|
||||
|
||||
res.setHeader('content-type', 'application/json')
|
||||
|
||||
if (JSON.parse(payload).user === 'arya') {
|
||||
res.end(JSON.stringify({
|
||||
took: 0,
|
||||
errors: true,
|
||||
items: [{
|
||||
index: {
|
||||
status: 400,
|
||||
error: { something: 'went wrong' }
|
||||
}
|
||||
}]
|
||||
}))
|
||||
} else {
|
||||
res.end(JSON.stringify({
|
||||
took: 0,
|
||||
errors: false,
|
||||
items: [{}]
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
const [{ port }, server] = await buildServer(handler)
|
||||
const client = new Client({ node: `http://localhost:${port}` })
|
||||
const result = await client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 1,
|
||||
concurrency: 1,
|
||||
wait: 10,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.deepEqual(doc, {
|
||||
status: 400,
|
||||
error: { something: 'went wrong' },
|
||||
operation: { index: { _index: 'test' } },
|
||||
document: { user: 'arya', age: 18 },
|
||||
retried: false
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.type(result.time, 'number')
|
||||
t.type(result.bytes, 'number')
|
||||
t.match(result, {
|
||||
total: 3,
|
||||
successful: 2,
|
||||
retry: 0,
|
||||
failed: 1,
|
||||
aborted: false
|
||||
})
|
||||
server.stop()
|
||||
})
|
||||
|
||||
t.test('Server error', async t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: { somothing: 'went wrong' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
const b = client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 1,
|
||||
concurrency: 1,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
await b
|
||||
t.fail('Should throw')
|
||||
} catch (err) {
|
||||
t.true(err instanceof errors.ResponseError)
|
||||
}
|
||||
})
|
||||
|
||||
t.test('Server error (high flush size, to trigger the finish error)', async t => {
|
||||
const MockConnection = connection.buildMockConnection({
|
||||
onRequest (params) {
|
||||
return {
|
||||
statusCode: 500,
|
||||
body: { somothing: 'went wrong' }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
Connection: MockConnection
|
||||
})
|
||||
const b = client.helpers.bulk({
|
||||
datasource: dataset.slice(),
|
||||
flushBytes: 5000000,
|
||||
concurrency: 1,
|
||||
onDocument (doc) {
|
||||
return {
|
||||
index: { _index: 'test' }
|
||||
}
|
||||
},
|
||||
onDrop (doc) {
|
||||
t.fail('This should never be called')
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
await b
|
||||
t.fail('Should throw')
|
||||
} catch (err) {
|
||||
t.true(err instanceof errors.ResponseError)
|
||||
}
|
||||
})
|
||||
|
||||
t.test('Should abort a bulk request', async t => {
  // Node v8 lacks async iterators (for await), which both the server
  // handler below and this test rely on.
  if (semver.lt(process.versions.node, '10.0.0')) {
    t.skip('This test will not pass on Node v8')
    return
  }
  // Real HTTP handler: reads the ndjson request body and replies with an
  // item-level 400 (HTTP 200) only for the document whose user is 'arya',
  // so exactly one document is dropped.
  async function handler (req, res) {
    t.strictEqual(req.url, '/_bulk')
    t.match(req.headers, { 'content-type': 'application/x-ndjson' })

    let body = ''
    req.setEncoding('utf8')
    for await (const chunk of req) {
      body += chunk
    }
    // With flushBytes: 1 each request carries a single action/payload pair,
    // so the second ndjson line is the document source.
    const [, payload] = body.split('\n')

    res.setHeader('content-type', 'application/json')

    if (JSON.parse(payload).user === 'arya') {
      res.end(JSON.stringify({
        took: 0,
        errors: true,
        items: [{
          index: {
            status: 400,
            error: { something: 'went wrong' }
          }
        }]
      }))
    } else {
      res.end(JSON.stringify({
        took: 0,
        errors: false,
        items: [{}]
      }))
    }
  }

  const [{ port }, server] = await buildServer(handler)
  const client = new Client({ node: `http://localhost:${port}` })
  const b = client.helpers.bulk({
    datasource: dataset.slice(),
    flushBytes: 1,
    concurrency: 1,
    wait: 10,
    onDocument (doc) {
      return {
        index: { _index: 'test' }
      }
    },
    onDrop (doc) {
      // Abort as soon as the first document is dropped; the helper should
      // stop sending further requests and resolve with aborted: true.
      b.abort()
    }
  })

  const result = await b
  t.type(result.time, 'number')
  t.type(result.bytes, 'number')
  // total: 2 — presumably the third document is never sent after abort;
  // one success plus the dropped one. NOTE(review): confirm against helper.
  t.match(result, {
    total: 2,
    successful: 1,
    retry: 0,
    failed: 1,
    aborted: true
  })
  server.stop()
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
t.test('datasource as stream', t => {
  t.test('Should perform a bulk request', async t => {
    // Tracks how many documents the mock server has seen; each flush must
    // carry the matching action line and dataset document, in order.
    let seen = 0
    const MockConnection = connection.buildMockConnection({
      onRequest (params) {
        t.strictEqual(params.path, '/_bulk')
        t.match(params.headers, { 'Content-Type': 'application/x-ndjson' })
        const lines = params.body.split('\n')
        t.deepEqual(JSON.parse(lines[0]), { index: { _index: 'test', _id: seen } })
        t.deepEqual(JSON.parse(lines[1]), dataset[seen])
        seen += 1
        return { body: { errors: false, items: [{}] } }
      }
    })

    const client = new Client({
      node: 'http://localhost:9200',
      Connection: MockConnection
    })
    // Feed the helper a readable stream of ndjson lines instead of an array.
    const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'small-dataset.ndjson'), 'utf8')

    let nextId = 0
    const result = await client.helpers.bulk({
      datasource: stream.pipe(split()),
      flushBytes: 1,
      concurrency: 1,
      onDocument (doc) {
        const action = {
          index: {
            _index: 'test',
            _id: nextId
          }
        }
        nextId += 1
        return action
      },
      onDrop (doc) {
        t.fail('This should never be called')
      }
    })

    t.type(result.time, 'number')
    t.type(result.bytes, 'number')
    t.match(result, {
      total: 3,
      successful: 3,
      retry: 0,
      failed: 0,
      aborted: false
    })
  })

  t.end()
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('bulk create', t => {
  t.test('Should perform a bulk request', async t => {
    // Each flush must carry a create action followed by the matching
    // dataset document, in insertion order.
    let seen = 0
    const MockConnection = connection.buildMockConnection({
      onRequest (params) {
        t.strictEqual(params.path, '/_bulk')
        t.match(params.headers, { 'Content-Type': 'application/x-ndjson' })
        const lines = params.body.split('\n')
        t.deepEqual(JSON.parse(lines[0]), { create: { _index: 'test', _id: seen } })
        t.deepEqual(JSON.parse(lines[1]), dataset[seen])
        seen += 1
        return { body: { errors: false, items: [{}] } }
      }
    })

    const client = new Client({
      node: 'http://localhost:9200',
      Connection: MockConnection
    })
    let nextId = 0
    const result = await client.helpers.bulk({
      datasource: dataset.slice(),
      flushBytes: 1,
      concurrency: 1,
      onDocument (doc) {
        const action = {
          create: {
            _index: 'test',
            _id: nextId
          }
        }
        nextId += 1
        return action
      },
      onDrop (doc) {
        t.fail('This should never be called')
      }
    })

    t.type(result.time, 'number')
    t.type(result.bytes, 'number')
    t.match(result, {
      total: 3,
      successful: 3,
      retry: 0,
      failed: 0,
      aborted: false
    })
  })
  t.end()
})
|
||||
|
||||
test('bulk update', t => {
  t.test('Should perform a bulk request', async t => {
    // Each flush must carry an update action followed by a doc/doc_as_upsert
    // payload built from the matching dataset document.
    let seen = 0
    const MockConnection = connection.buildMockConnection({
      onRequest (params) {
        t.strictEqual(params.path, '/_bulk')
        t.match(params.headers, { 'Content-Type': 'application/x-ndjson' })
        const lines = params.body.split('\n')
        t.deepEqual(JSON.parse(lines[0]), { update: { _index: 'test', _id: seen } })
        t.deepEqual(JSON.parse(lines[1]), { doc: dataset[seen], doc_as_upsert: true })
        seen += 1
        return { body: { errors: false, items: [{}] } }
      }
    })

    const client = new Client({
      node: 'http://localhost:9200',
      Connection: MockConnection
    })
    let nextId = 0
    const result = await client.helpers.bulk({
      datasource: dataset.slice(),
      flushBytes: 1,
      concurrency: 1,
      onDocument (doc) {
        // Update actions return a [action, payloadOptions] pair.
        const action = {
          update: {
            _index: 'test',
            _id: nextId
          }
        }
        nextId += 1
        return [action, { doc_as_upsert: true }]
      },
      onDrop (doc) {
        t.fail('This should never be called')
      }
    })

    t.type(result.time, 'number')
    t.type(result.bytes, 'number')
    t.match(result, {
      total: 3,
      successful: 3,
      retry: 0,
      failed: 0,
      aborted: false
    })
  })
  t.end()
})
|
||||
|
||||
test('bulk delete', t => {
  t.test('Should perform a bulk request', async t => {
    let count = 0
    // Mock transport: each flush carries a single delete action line
    // (delete has no payload line), whose _id must follow dataset order.
    const MockConnection = connection.buildMockConnection({
      onRequest (params) {
        t.strictEqual(params.path, '/_bulk')
        t.match(params.headers, { 'Content-Type': 'application/x-ndjson' })
        t.deepEqual(JSON.parse(params.body), { delete: { _index: 'test', _id: count++ } })
        return { body: { errors: false, items: [{}] } }
      }
    })

    const client = new Client({
      node: 'http://localhost:9200',
      Connection: MockConnection
    })
    let id = 0
    const result = await client.helpers.bulk({
      datasource: dataset.slice(),
      flushBytes: 1,
      concurrency: 1,
      onDocument (doc) {
        return {
          delete: {
            _index: 'test',
            _id: id++
          }
        }
      },
      onDrop (doc) {
        t.fail('This should never be called')
      }
    })

    t.type(result.time, 'number')
    t.type(result.bytes, 'number')
    t.match(result, {
      total: 3,
      successful: 3,
      retry: 0,
      failed: 0,
      aborted: false
    })
  })

  t.test('Should perform a bulk request (failure)', async t => {
    // Node v8 lacks async iterators (for await) used by the handler below.
    if (semver.lt(process.versions.node, '10.0.0')) {
      t.skip('This test will not pass on Node v8')
      return
    }
    // Real HTTP handler: replies with an item-level 400 only for the delete
    // whose _id is 1, so exactly one operation is dropped.
    async function handler (req, res) {
      t.strictEqual(req.url, '/_bulk')
      t.match(req.headers, { 'content-type': 'application/x-ndjson' })

      let body = ''
      req.setEncoding('utf8')
      for await (const chunk of req) {
        body += chunk
      }

      res.setHeader('content-type', 'application/json')

      if (JSON.parse(body).delete._id === 1) {
        res.end(JSON.stringify({
          took: 0,
          errors: true,
          items: [{
            delete: {
              status: 400,
              error: { something: 'went wrong' }
            }
          }]
        }))
      } else {
        res.end(JSON.stringify({
          took: 0,
          errors: false,
          items: [{}]
        }))
      }
    }

    const [{ port }, server] = await buildServer(handler)
    const client = new Client({ node: `http://localhost:${port}` })
    let id = 0
    const result = await client.helpers.bulk({
      datasource: dataset.slice(),
      flushBytes: 1,
      concurrency: 1,
      wait: 10,
      onDocument (doc) {
        return {
          delete: {
            _index: 'test',
            _id: id++
          }
        }
      },
      onDrop (doc) {
        // The dropped entry exposes the failing operation; document is null
        // because a delete action carries no source payload.
        t.deepEqual(doc, {
          status: 400,
          error: { something: 'went wrong' },
          operation: { delete: { _index: 'test', _id: 1 } },
          document: null,
          retried: false
        })
      }
    })

    t.type(result.time, 'number')
    t.type(result.bytes, 'number')
    t.match(result, {
      total: 3,
      successful: 2,
      retry: 0,
      failed: 1,
      aborted: false
    })
    server.stop()
  })

  t.end()
})
|
||||
|
||||
test('errors', t => {
  // Configuration validation: each case must reject with a
  // ConfigurationError carrying a specific message. A t.fail guard after
  // each awaited call prevents the tests from passing vacuously when the
  // helper does NOT throw (consistent with the other tests in this file).
  t.test('datasource type', async t => {
    const client = new Client({
      node: 'http://localhost:9200'
    })
    try {
      await client.helpers.bulk({
        datasource: 'hello',
        onDocument (doc) {
          return {
            index: { _index: 'test' }
          }
        }
      })
      t.fail('Should throw')
    } catch (err) {
      t.true(err instanceof errors.ConfigurationError)
      t.is(err.message, 'bulk helper: the datasource must be an array or a buffer or a readable stream')
    }
  })

  t.test('missing datasource', async t => {
    const client = new Client({
      node: 'http://localhost:9200'
    })
    try {
      await client.helpers.bulk({
        onDocument (doc) {
          return {
            index: { _index: 'test' }
          }
        }
      })
      t.fail('Should throw')
    } catch (err) {
      t.true(err instanceof errors.ConfigurationError)
      t.is(err.message, 'bulk helper: the datasource is required')
    }
  })

  t.test('missing onDocument', async t => {
    const client = new Client({
      node: 'http://localhost:9200'
    })
    try {
      await client.helpers.bulk({
        datasource: dataset.slice()
      })
      t.fail('Should throw')
    } catch (err) {
      t.true(err instanceof errors.ConfigurationError)
      t.is(err.message, 'bulk helper: the onDocument callback is required')
    }
  })

  t.end()
})
|
||||
220
test/unit/helpers/scroll.test.js
Normal file
220
test/unit/helpers/scroll.test.js
Normal file
@ -0,0 +1,220 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
const { test } = require('tap')
|
||||
const { Client, errors } = require('../../../')
|
||||
const { connection } = require('../../utils')
|
||||
|
||||
test('Scroll search', async t => {
  // Drive the scroll helper through four pages; the last page omits the
  // scroll id, which ends the iteration.
  let page = 0
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      t.strictEqual(params.querystring, 'scroll=1m')
      const body = {
        _scroll_id: page === 3 ? undefined : 'id',
        count: page,
        hits: {
          hits: [
            { _source: { one: 'one' } },
            { _source: { two: 'two' } },
            { _source: { three: 'three' } }
          ]
        }
      }
      return { body }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const scrollSearch = client.helpers.scrollSearch({
    index: 'test',
    body: { foo: 'bar' }
  })

  for await (const result of scrollSearch) {
    t.strictEqual(result.body.count, page)
    // Pages 0-2 carry a scroll id; the final page does not.
    const expectedScrollId = page < 3 ? 'id' : undefined
    t.strictEqual(result.body._scroll_id, expectedScrollId)
    page += 1
  }
})
|
||||
|
||||
test('Clear a scroll search', async t => {
  var count = 0
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      // A DELETE request is the clear-scroll call issued by result.clear();
      // verify it targets the scroll id returned by this mock.
      if (params.method === 'DELETE') {
        const body = JSON.parse(params.body)
        t.strictEqual(body.scroll_id, 'id')
      }
      return {
        body: {
          _scroll_id: count === 3 ? undefined : 'id',
          count,
          hits: {
            hits: [
              { _source: { one: 'one' } },
              { _source: { two: 'two' } },
              { _source: { three: 'three' } }
            ]
          }
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const scrollSearch = client.helpers.scrollSearch({
    index: 'test',
    body: { foo: 'bar' }
  })

  for await (const result of scrollSearch) {
    // clear() is called on page 1, so iteration must stop before page 2.
    if (count === 2) {
      t.fail('The scroll search should be cleared')
    }
    t.strictEqual(result.body.count, count)
    if (count === 1) {
      await result.clear()
    }
    count += 1
  }
})
|
||||
|
||||
test('Scroll search (retry)', async t => {
  // `count` is shared between the mock (read) and the consumer loop
  // (incremented), so the 429 fires exactly once for the second request.
  var count = 0
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      // Fail the second request once with a 429 so the helper retries it;
      // count is bumped here so the retried request returns count === 2.
      if (count === 1) {
        count += 1
        return { body: {}, statusCode: 429 }
      }
      return {
        statusCode: 200,
        body: {
          _scroll_id: count === 4 ? undefined : 'id',
          count,
          hits: {
            hits: [
              { _source: { one: 'one' } },
              { _source: { two: 'two' } },
              { _source: { three: 'three' } }
            ]
          }
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  const scrollSearch = client.helpers.scrollSearch({
    index: 'test',
    body: { foo: 'bar' }
  }, {
    // Short retry backoff to keep the test fast.
    wait: 10
  })

  for await (const result of scrollSearch) {
    t.strictEqual(result.body.count, count)
    // The page that got a 429 must never surface to the consumer.
    t.notStrictEqual(result.body.count, 1)
    if (count < 4) {
      t.strictEqual(result.body._scroll_id, 'id')
    } else {
      t.strictEqual(result.body._scroll_id, undefined)
    }
    count += 1
  }
})
|
||||
|
||||
test('Scroll search (retry throws and maxRetries)', async t => {
  // Every request replies 429; the helper should retry up to the client's
  // maxRetries and then surface the ResponseError.
  let count = 0
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      count += 1
      return { body: {}, statusCode: 429 }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection,
    maxRetries: 5
  })

  const scrollSearch = client.helpers.scrollSearch({
    index: 'test',
    body: { foo: 'bar' }
  }, {
    wait: 10
  })

  try {
    for await (const result of scrollSearch) { // eslint-disable-line
      t.fail('we should not be here')
    }
    // Guard: the iteration must end with a throw, never complete cleanly.
    // Without this the test would pass vacuously if no error were raised.
    t.fail('Should throw')
  } catch (err) {
    t.true(err instanceof errors.ResponseError)
    t.strictEqual(err.statusCode, 429)
    t.strictEqual(count, 5)
  }
})
|
||||
|
||||
test('Scroll search documents', async t => {
  // Each page multiplies the base values 1..3 by the page counter, so every
  // yielded document can be predicted from the page and its position.
  let page = 0
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          _scroll_id: page === 3 ? undefined : 'id',
          count: page,
          hits: {
            hits: [
              { _source: { val: 1 * page } },
              { _source: { val: 2 * page } },
              { _source: { val: 3 * page } }
            ]
          }
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  // scrollDocuments yields the _source of every hit directly.
  const scrollSearch = client.helpers.scrollDocuments({
    index: 'test',
    body: { foo: 'bar' }
  })

  let position = 1
  for await (const hit of scrollSearch) {
    t.deepEqual(hit, { val: position * page })
    position += 1
    if (position === 4) {
      page += 1
      position = 1
    }
  }
})
|
||||
42
test/unit/helpers/search.test.js
Normal file
42
test/unit/helpers/search.test.js
Normal file
@ -0,0 +1,42 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
const { test } = require('tap')
|
||||
const { Client } = require('../../../')
|
||||
const { connection } = require('../../utils')
|
||||
|
||||
test('Search should have an additional documents property', async t => {
  // The expected documents double as the mock's _source values, so the
  // assertion and the fixture cannot drift apart.
  const sources = [
    { one: 'one' },
    { two: 'two' },
    { three: 'three' }
  ]
  const MockConnection = connection.buildMockConnection({
    onRequest (params) {
      return {
        body: {
          hits: {
            hits: sources.map(_source => ({ _source }))
          }
        }
      }
    }
  })

  const client = new Client({
    node: 'http://localhost:9200',
    Connection: MockConnection
  })

  // The search helper unwraps the response and resolves with the documents.
  const documents = await client.helpers.search({
    index: 'test',
    body: { foo: 'bar' }
  })
  t.deepEqual(documents, sources)
})
|
||||
@ -4,6 +4,7 @@
|
||||
|
||||
'use strict'
|
||||
|
||||
const assert = require('assert')
|
||||
const { Connection } = require('../../index')
|
||||
const { TimeoutError } = require('../../lib/errors')
|
||||
const intoStream = require('into-stream')
|
||||
@ -99,6 +100,39 @@ class MockConnectionSniff extends Connection {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Builds a Connection subclass whose responses are produced by the given
// opts.onRequest(params) callback ({ body, statusCode }).
function buildMockConnection (opts) {
  assert(opts.onRequest, 'Missing required onRequest option')

  class MockConnection extends Connection {
    request (params, callback) {
      const response = opts.onRequest(params)
      // Serialize non-string bodies so the stream always carries text.
      const payload = typeof response.body === 'string'
        ? response.body
        : JSON.stringify(response.body)
      let aborted = false
      const stream = intoStream(payload)
      stream.statusCode = response.statusCode || 200
      stream.headers = {
        'content-type': 'application/json;utf=8',
        date: new Date().toISOString(),
        connection: 'keep-alive',
        'content-length': Buffer.byteLength(payload)
      }
      // Deliver asynchronously, and only if abort() was not called first.
      process.nextTick(() => {
        if (aborted === false) {
          callback(null, stream)
        }
      })
      return {
        abort: () => { aborted = true }
      }
    }
  }

  return MockConnection
}
|
||||
|
||||
function setStatusCode (path) {
|
||||
const statusCode = Number(path.slice(1))
|
||||
if (Number.isInteger(statusCode)) {
|
||||
@ -111,5 +145,6 @@ module.exports = {
|
||||
MockConnection,
|
||||
MockConnectionTimeout,
|
||||
MockConnectionError,
|
||||
MockConnectionSniff
|
||||
MockConnectionSniff,
|
||||
buildMockConnection
|
||||
}
|
||||
|
||||
@ -38,11 +38,21 @@ function buildServer (handler, opts, cb) {
|
||||
console.log('http server error', err)
|
||||
process.exit(1)
|
||||
})
|
||||
server.listen(0, () => {
|
||||
const port = server.address().port
|
||||
debug(`Server '${serverId}' booted on port ${port}`)
|
||||
cb(Object.assign({}, secureOpts, { port }), server)
|
||||
})
|
||||
if (cb === undefined) {
|
||||
return new Promise((resolve, reject) => {
|
||||
server.listen(0, () => {
|
||||
const port = server.address().port
|
||||
debug(`Server '${serverId}' booted on port ${port}`)
|
||||
resolve([Object.assign({}, secureOpts, { port }), server])
|
||||
})
|
||||
})
|
||||
} else {
|
||||
server.listen(0, () => {
|
||||
const port = server.address().port
|
||||
debug(`Server '${serverId}' booted on port ${port}`)
|
||||
cb(Object.assign({}, secureOpts, { port }), server)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildServer
|
||||
|
||||
@ -4,12 +4,30 @@
|
||||
|
||||
'use strict'
|
||||
|
||||
const { promisify } = require('util')
|
||||
const sleep = promisify(setTimeout)
|
||||
const buildServer = require('./buildServer')
|
||||
const buildCluster = require('./buildCluster')
|
||||
const connection = require('./MockConnection')
|
||||
|
||||
// Polls cluster health until the cluster reaches `waitForStatus`,
// retrying up to 10 times with a 5 second pause between attempts.
// Throws the last health-check error if the cluster never becomes ready.
async function waitCluster (client, waitForStatus = 'green', timeout = '50s', times = 0) {
  if (!client) {
    throw new Error('waitCluster helper: missing client instance')
  }
  try {
    await client.cluster.health({ waitForStatus, timeout })
  } catch (err) {
    if (++times < 10) {
      await sleep(5000)
      // Recurse with the incremented attempt counter.
      return waitCluster(client, waitForStatus, timeout, times)
    }
    throw err
  }
}
|
||||
|
||||
module.exports = {
|
||||
buildServer,
|
||||
buildCluster,
|
||||
connection
|
||||
connection,
|
||||
waitCluster
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user