Support for Elasticsearch 7.7 (#1192)
committed by GitHub · parent be6257380e · commit 51169d5efa
test/integration/helpers/bulk.test.js · 189 lines · new file
@@ -0,0 +1,189 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information

'use strict'

const { createReadStream } = require('fs')
const { join } = require('path')
const split = require('split2')
const { test, beforeEach, afterEach } = require('tap')
const { waitCluster } = require('../../utils')
const { Client } = require('../../../')

const datasetPath = join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson')
const INDEX = `test-helpers-${process.pid}`
const client = new Client({
  node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
})

beforeEach(async () => {
  await waitCluster(client)
  await client.indices.create({ index: INDEX })
})

afterEach(async () => {
  await client.indices.delete({ index: INDEX }, { ignore: 404 })
})

test('bulk index', async t => {
  const stream = createReadStream(datasetPath)
  const result = await client.helpers.bulk({
    datasource: stream.pipe(split()),
    refreshOnCompletion: INDEX,
    onDrop (doc) {
      t.fail('It should not drop any document')
    },
    onDocument (doc) {
      return {
        index: { _index: INDEX }
      }
    }
  })

  t.type(result.time, 'number')
  t.type(result.bytes, 'number')
  t.match(result, {
    total: 5000,
    successful: 5000,
    retry: 0,
    failed: 0,
    aborted: false
  })

  const { body } = await client.count({ index: INDEX })
  t.match(body, { count: 5000 })
})

test('bulk index with custom id', async t => {
  const stream = createReadStream(datasetPath)
  const result = await client.helpers.bulk({
    datasource: stream.pipe(split(JSON.parse)),
    onDrop (doc) {
      t.fail('It should not drop any document')
    },
    onDocument (doc) {
      return {
        index: {
          _index: INDEX,
          _id: doc.id
        }
      }
    }
  })

  t.type(result.time, 'number')
  t.type(result.bytes, 'number')
  t.match(result, {
    total: 5000,
    successful: 5000,
    retry: 0,
    failed: 0,
    aborted: false
  })

  const { body } = await client.get({
    index: INDEX,
    id: '19273860' // id of document n° 4242
  })

  t.strictEqual(body._index, INDEX)
  t.strictEqual(body._id, '19273860')
  t.strictEqual(body._source.id, '19273860')
})

test('abort the operation on document drop', async t => {
  const stream = createReadStream(datasetPath)
  const b = client.helpers.bulk({
    datasource: stream.pipe(split(JSON.parse)),
    concurrency: 1,
    onDrop (doc) {
      t.strictEqual(doc.status, 400)
      t.strictEqual(doc.error.type, 'mapper_parsing_exception')
      t.strictEqual(doc.document.id, '45924372')
      b.abort()
    },
    onDocument (doc) {
      if (doc.id === '45924372') { // id of document n° 500
        // this will break the mapping
        doc.title = { foo: 'bar' }
      }
      return {
        index: {
          _index: INDEX,
          _id: doc.id
        }
      }
    }
  })

  const result = await b
  t.type(result.time, 'number')
  t.type(result.bytes, 'number')
  t.strictEqual(result.total - 1, result.successful)
  t.match(result, {
    retry: 0,
    failed: 1,
    aborted: true
  })
})

test('bulk delete', async t => {
  const indexResult = await client.helpers.bulk({
    datasource: createReadStream(datasetPath).pipe(split(JSON.parse)),
    refreshOnCompletion: true,
    onDrop (doc) {
      t.fail('It should not drop any document')
    },
    onDocument (doc) {
      return {
        index: {
          _index: INDEX,
          _id: doc.id
        }
      }
    }
  })

  t.type(indexResult.time, 'number')
  t.type(indexResult.bytes, 'number')
  t.match(indexResult, {
    total: 5000,
    successful: 5000,
    retry: 0,
    failed: 0,
    aborted: false
  })

  const { body: afterIndex } = await client.count({ index: INDEX })
  t.match(afterIndex, { count: 5000 })

  const deleteResult = await client.helpers.bulk({
    datasource: createReadStream(datasetPath).pipe(split(JSON.parse)),
    refreshOnCompletion: true,
    onDrop (doc) {
      t.fail('It should not drop any document')
    },
    onDocument (doc) {
      return {
        delete: {
          _index: INDEX,
          _id: doc.id
        }
      }
    }
  })

  t.type(deleteResult.time, 'number')
  t.type(deleteResult.bytes, 'number')
  t.match(deleteResult, {
    total: 5000,
    successful: 5000,
    retry: 0,
    failed: 0,
    aborted: false
  })

  const { body: afterDelete } = await client.count({ index: INDEX })
  t.match(afterDelete, { count: 0 })
})
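A detail worth calling out in these tests is the two datasource shapes: plain split() emits raw ndjson strings, which is enough when onDocument only builds the action, while split(JSON.parse) emits parsed objects, which the custom-id and abort tests need in order to read doc.id. A minimal sketch of the two styles (the dataset.ndjson path is a placeholder for illustration, not part of this commit):

// Sketch, not part of the commit: the two datasource styles fed to client.helpers.bulk.
const { createReadStream } = require('fs')
const split = require('split2')

// Raw lines: each chunk is a JSON string; onDocument never inspects the document itself.
const rawLines = createReadStream('dataset.ndjson').pipe(split())

// Parsed objects: each chunk is an object, so onDocument can route on fields like doc.id.
const parsedDocs = createReadStream('dataset.ndjson').pipe(split(JSON.parse))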
test/integration/helpers/scroll.test.js · 103 lines · new file
@@ -0,0 +1,103 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information

'use strict'

const { createReadStream } = require('fs')
const { join } = require('path')
const split = require('split2')
const { test, beforeEach, afterEach } = require('tap')
const { waitCluster } = require('../../utils')
const { Client } = require('../../../')

const INDEX = `test-helpers-${process.pid}`
const client = new Client({
  node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
})

beforeEach(async () => {
  await waitCluster(client)
  await client.indices.create({ index: INDEX })
  const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
  const result = await client.helpers.bulk({
    datasource: stream.pipe(split()),
    refreshOnCompletion: true,
    onDocument (doc) {
      return {
        index: { _index: INDEX }
      }
    }
  })
  if (result.failed > 0) {
    throw new Error('Failed bulk indexing docs')
  }
})

afterEach(async () => {
  await client.indices.delete({ index: INDEX }, { ignore: 404 })
})

test('search helper', async t => {
  const scrollSearch = client.helpers.scrollSearch({
    index: INDEX,
    body: {
      query: {
        match: {
          title: 'javascript'
        }
      }
    }
  })

  var count = 0
  for await (const search of scrollSearch) {
    count += 1
    for (const doc of search.documents) {
      t.true(doc.title.toLowerCase().includes('javascript'))
    }
  }
  t.strictEqual(count, 11)
})

test('clear a scroll search', async t => {
  const scrollSearch = client.helpers.scrollSearch({
    index: INDEX,
    body: {
      query: {
        match: {
          title: 'javascript'
        }
      }
    }
  })

  var count = 0
  for await (const search of scrollSearch) {
    count += 1
    if (count === 2) {
      search.clear()
    }
  }
  t.strictEqual(count, 2)
})

test('scroll documents', async t => {
  const scrollSearch = client.helpers.scrollDocuments({
    index: INDEX,
    body: {
      query: {
        match: {
          title: 'javascript'
        }
      }
    }
  })

  var count = 0
  for await (const doc of scrollSearch) {
    count += 1
    t.true(doc.title.toLowerCase().includes('javascript'))
  }
  t.strictEqual(count, 106)
})
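For context, the two scroll helpers differ in what each iteration yields: scrollSearch produces one result object per page (with a documents shortcut to the hit sources and a clear() method to terminate the scroll early), while scrollDocuments flattens the pages and yields one source document at a time. A minimal consumption sketch, assuming the same client and INDEX as in the tests above:

// Hedged sketch: page-level iteration with early termination via clear().
async function countJavascriptTitles () {
  let pages = 0
  let docs = 0
  const scroll = client.helpers.scrollSearch({
    index: INDEX,
    body: { query: { match: { title: 'javascript' } } }
  })
  for await (const page of scroll) {
    pages += 1
    docs += page.documents.length
    if (pages === 5) page.clear() // stop scrolling early, as the clear test does
  }
  return { pages, docs }
}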
test/integration/helpers/search.test.js · 56 lines · new file
@@ -0,0 +1,56 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information

'use strict'

const { createReadStream } = require('fs')
const { join } = require('path')
const split = require('split2')
const { test, beforeEach, afterEach } = require('tap')
const { waitCluster } = require('../../utils')
const { Client } = require('../../../')

const INDEX = `test-helpers-${process.pid}`
const client = new Client({
  node: process.env.TEST_ES_SERVER || 'http://localhost:9200'
})

beforeEach(async () => {
  await waitCluster(client)
  await client.indices.create({ index: INDEX })
  const stream = createReadStream(join(__dirname, '..', '..', 'fixtures', 'stackoverflow.ndjson'))
  const result = await client.helpers.bulk({
    datasource: stream.pipe(split()),
    refreshOnCompletion: true,
    onDocument (doc) {
      return {
        index: { _index: INDEX }
      }
    }
  })
  if (result.failed > 0) {
    throw new Error('Failed bulk indexing docs')
  }
})

afterEach(async () => {
  await client.indices.delete({ index: INDEX }, { ignore: 404 })
})

test('search helper', async t => {
  const results = await client.helpers.search({
    index: INDEX,
    body: {
      query: {
        match: {
          title: 'javascript'
        }
      }
    }
  })
  t.strictEqual(results.length, 10)
  for (const result of results) {
    t.true(result.title.toLowerCase().includes('javascript'))
  }
})
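As the assertions show, the search helper strips the response envelope and resolves directly to the array of hit sources (ten here, matching Elasticsearch's default result size). A usage sketch under the same assumptions as above:

// Hedged sketch: helpers.search resolves to the hit sources themselves.
async function titlesMatching (text) {
  const docs = await client.helpers.search({
    index: INDEX,
    body: { query: { match: { title: text } } }
  })
  return docs.map(doc => doc.title) // no .body.hits.hits unwrapping needed
}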
test/integration/index.js
@@ -4,14 +4,15 @@

 'use strict'

-const { readFileSync, accessSync, mkdirSync, readdirSync, statSync } = require('fs')
+const { writeFileSync, readFileSync, accessSync, mkdirSync, readdirSync, statSync } = require('fs')
 const { join, sep } = require('path')
 const yaml = require('js-yaml')
 const Git = require('simple-git')
+const ms = require('ms')
 const { Client } = require('../../index')
 const build = require('./test-runner')
 const { sleep } = require('./helper')
-const ms = require('ms')
+const createJunitReporter = require('./reporter')

 const esRepo = 'https://github.com/elastic/elasticsearch.git'
 const esFolder = join(__dirname, '..', '..', 'elasticsearch')
@@ -63,6 +64,23 @@ const xPackBlackList = {
   'sql/sql.yml': ['Getting textual representation'],
   // we are setting two certificates in the docker config
   'ssl/10_basic.yml': ['*'],
+  // very likely, the index template has not been loaded yet.
+  // we should run an indices.existsTemplate, but the name of the
+  // template may vary during time.
+  'transforms_crud.yml': [
+    'Test basic transform crud',
+    'Test transform with query and array of indices in source',
+    'Test PUT continuous transform',
+    'Test PUT continuous transform without delay set'
+  ],
+  'transforms_force_delete.yml': [
+    'Test force deleting a running transform'
+  ],
+  'transforms_cat_apis.yml': ['*'],
+  'transforms_start_stop.yml': ['*'],
+  'transforms_stats.yml': ['*'],
+  'transforms_stats_continuous.yml': ['*'],
+  'transforms_update.yml': ['*'],
   // docker issue?
   'watcher/execute_watch/60_http_input.yml': ['*'],
   // the checks are correct, but for some reason the test is failing on js side
@@ -110,6 +128,8 @@ async function start ({ client, isXPack }) {
   await withSHA(sha)

   log(`Testing ${isXPack ? 'XPack' : 'oss'} api...`)
+  const junit = createJunitReporter()
+  const junitTestSuites = junit.testsuites(`Integration test for ${isXPack ? 'XPack' : 'oss'} api`)

   const stats = {
     total: 0,
@@ -173,31 +193,43 @@ async function start ({ client, isXPack }) {

       const cleanPath = file.slice(file.lastIndexOf(apiName))
       log(' ' + cleanPath)
+      const junitTestSuite = junitTestSuites.testsuite(apiName.slice(1) + ' - ' + cleanPath)

       for (const test of tests) {
         const testTime = now()
         const name = Object.keys(test)[0]
         if (name === 'setup' || name === 'teardown') continue
+        const junitTestCase = junitTestSuite.testcase(name)

         stats.total += 1
         if (shouldSkip(isXPack, file, name)) {
           stats.skip += 1
+          junitTestCase.skip('This test is in the skip list of the client')
+          junitTestCase.end()
           continue
         }
         log(' - ' + name)
         try {
-          await testRunner.run(setupTest, test[name], teardownTest, stats)
+          await testRunner.run(setupTest, test[name], teardownTest, stats, junitTestCase)
           stats.pass += 1
         } catch (err) {
+          junitTestCase.failure(err)
+          junitTestCase.end()
+          junitTestSuite.end()
+          junitTestSuites.end()
+          generateJunitXmlReport(junit, isXPack ? 'xpack' : 'oss')
           console.error(err)
           process.exit(1)
         }
         const totalTestTime = now() - testTime
+        junitTestCase.end()
         if (totalTestTime > MAX_TEST_TIME) {
           log(' took too long: ' + ms(totalTestTime))
         } else {
           log(' took: ' + ms(totalTestTime))
         }
       }
+      junitTestSuite.end()
       const totalFileTime = now() - fileTime
       if (totalFileTime > MAX_FILE_TIME) {
         log(` ${cleanPath} took too long: ` + ms(totalFileTime))
@@ -212,6 +244,8 @@ async function start ({ client, isXPack }) {
       log(`${apiName} took: ` + ms(totalApiTime))
     }
   }
+  junitTestSuites.end()
+  generateJunitXmlReport(junit, isXPack ? 'xpack' : 'oss')
   log(`Total testing time: ${ms(now() - totalTime)}`)
   log(`Test stats:
   - Total: ${stats.total}
@@ -336,6 +370,13 @@ function createFolder (name) {
   }
 }

+function generateJunitXmlReport (junit, suite) {
+  writeFileSync(
+    join(__dirname, '..', '..', `${suite}-report-junit.xml`),
+    junit.prettyPrint()
+  )
+}
+
 if (require.main === module) {
   const node = process.env.TEST_ES_SERVER || 'http://localhost:9200'
   const opts = {
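Taken together, these hunks wire the JUnit reporter into the integration runner: one reporter is created per run, each YAML file becomes a testsuite and each test a testcase, and the XML report is flushed both on a fatal failure (just before process.exit(1)) and after a complete run, landing in `${suite}-report-junit.xml` two directories above the runner (the repository root).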
test/integration/reporter.js · 109 lines · new file
@@ -0,0 +1,109 @@
'use strict'

const assert = require('assert')
const { create } = require('xmlbuilder2')

function createJunitReporter () {
  const report = {}

  return { testsuites, prettyPrint }

  function prettyPrint () {
    return create(report).end({ prettyPrint: true })
  }

  function testsuites (name) {
    assert(name, 'The testsuites name is required')
    assert(report.testsuites === undefined, 'Cannot set more than one testsuites block')
    const startTime = Date.now()

    report.testsuites = {
      '@id': new Date().toISOString(),
      '@name': name
    }

    const testsuiteList = []

    return {
      testsuite: createTestSuite(testsuiteList),
      end () {
        report.testsuites['@time'] = Math.round((Date.now() - startTime) / 1000)
        report.testsuites['@tests'] = testsuiteList.reduce((acc, val) => {
          acc += val['@tests']
          return acc
        }, 0)
        report.testsuites['@failures'] = testsuiteList.reduce((acc, val) => {
          acc += val['@failures']
          return acc
        }, 0)
        report.testsuites['@skipped'] = testsuiteList.reduce((acc, val) => {
          acc += val['@skipped']
          return acc
        }, 0)
        if (testsuiteList.length) {
          report.testsuites.testsuite = testsuiteList
        }
      }
    }
  }

  function createTestSuite (testsuiteList) {
    return function testsuite (name) {
      assert(name, 'The testsuite name is required')
      const startTime = Date.now()
      const suite = {
        '@id': new Date().toISOString(),
        '@name': name
      }
      const testcaseList = []
      testsuiteList.push(suite)
      return {
        testcase: createTestCase(testcaseList),
        end () {
          suite['@time'] = Math.round((Date.now() - startTime) / 1000)
          suite['@tests'] = testcaseList.length
          suite['@failures'] = testcaseList.filter(t => t.failure).length
          suite['@skipped'] = testcaseList.filter(t => t.skipped).length
          if (testcaseList.length) {
            suite.testcase = testcaseList
          }
        }
      }
    }
  }

  function createTestCase (testcaseList) {
    return function testcase (name) {
      assert(name, 'The testcase name is required')
      const startTime = Date.now()
      const tcase = {
        '@id': new Date().toISOString(),
        '@name': name
      }
      testcaseList.push(tcase)
      return {
        failure (error) {
          assert(error, 'The failure error object is required')
          tcase.failure = {
            '#': error.stack,
            '@message': error.message,
            '@type': error.code
          }
        },
        skip (reason) {
          if (typeof reason !== 'string') {
            reason = JSON.stringify(reason, null, 2)
          }
          tcase.skipped = {
            '#': reason
          }
        },
        end () {
          tcase['@time'] = Math.round((Date.now() - startTime) / 1000)
        }
      }
    }
  }
}

module.exports = createJunitReporter
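Putting the reporter together end to end (mirroring how the runner drives it): create one testsuites block, nest suites and cases, close everything with end(), then serialize with prettyPrint(). A minimal sketch of the API exercised above; the suite and case names are illustrative only:

// Sketch of the reporter's API, as exercised by the integration runner.
const createJunitReporter = require('./reporter')

const junit = createJunitReporter()
const suites = junit.testsuites('Integration test for oss api')
const suite = suites.testsuite('search - search/10_basic.yml')
const tcase = suite.testcase('basic search')
try {
  // ... run the actual test here ...
  tcase.end()
} catch (err) {
  tcase.failure(err)
  tcase.end()
}
suite.end()
suites.end()
console.log(junit.prettyPrint()) // XML string, ready to write to disk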
test/integration/test-runner.js
@@ -48,7 +48,7 @@ function build (opts = {}) {
   }

   try {
-    await client.indices.delete({ index: '_all' }, { ignore: 404 })
+    await client.indices.delete({ index: '_all', expandWildcards: 'all' }, { ignore: 404 })
   } catch (err) {
     assert.ifError(err, 'should not error: indices.delete')
   }
@@ -212,11 +212,12 @@
   * @param {object} teardown (null if not needed)
   * @returns {Promise}
   */
-  async function run (setup, test, teardown, stats) {
+  async function run (setup, test, teardown, stats, junit) {
     // if we should skip a feature in the setup/teardown section
     // we should skip the entire test file
     const skip = getSkip(setup) || getSkip(teardown)
     if (skip && shouldSkip(esVersion, skip)) {
+      junit.skip(skip)
       logSkip(skip)
       return
     }
@@ -234,11 +235,11 @@
       }
     }

-    if (setup) await exec('Setup', setup, stats)
+    if (setup) await exec('Setup', setup, stats, junit)

-    await exec('Test', test, stats)
+    await exec('Test', test, stats, junit)

-    if (teardown) await exec('Teardown', teardown, stats)
+    if (teardown) await exec('Teardown', teardown, stats, junit)

     if (isXPack) await cleanupXPack()

@@ -445,11 +446,12 @@
   * @param {object} the actions to perform
   * @returns {Promise}
   */
-  async function exec (name, actions, stats) {
+  async function exec (name, actions, stats, junit) {
     // tap.comment(name)
     for (const action of actions) {
       if (action.skip) {
         if (shouldSkip(esVersion, action.skip)) {
+          junit.skip(fillStashedValues(action.skip))
           logSkip(fillStashedValues(action.skip))
           break
         }