Drop node v10 support (#1471)
committed by delvedor
parent 6001e5a328
commit fe6a73b5fb
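Most of this diff is a mechanical swap of tap's deprecated assertion aliases for their current names, presumably because dropping Node.js v10 goes hand in hand with upgrading the tap test runner. A minimal sketch of the mapping applied throughout the hunks below (the values are illustrative, not taken from the test suite):

const t = require('tap')

// deprecated alias             current tap assertion (as used in this diff)
t.equal(1 + 1, 2)           // was t.strictEqual / t.is
t.same({ a: 1 }, { a: 1 })  // was t.deepEqual
t.ok(true)                  // was t.true
t.not('a', 'b')             // was t.notStrictEqual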
@@ -44,14 +44,14 @@ test('bulk index', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, {
 'content-type': 'application/x-ndjson',
 'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=bp`
 })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test' } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -89,14 +89,14 @@ test('bulk index', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 t.notMatch(params.headers, {
 'x-elastic-client-meta': `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion},h=bp`
 })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test' } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -134,9 +134,9 @@ test('bulk index', t => {
 t.test('Should perform a bulk request (high flush size)', async t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
-t.strictEqual(params.body.split('\n').filter(Boolean).length, 6)
+t.equal(params.body.split('\n').filter(Boolean).length, 6)
 return { body: { errors: false, items: new Array(3).fill({}) } }
 }
 })
@@ -175,14 +175,14 @@ test('bulk index', t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
 if (params.method === 'GET') {
-t.strictEqual(params.path, '/_all/_refresh')
+t.equal(params.path, '/_all/_refresh')
 return { body: { acknowledged: true } }
 } else {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test' } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 }
@@ -220,14 +220,14 @@ test('bulk index', t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
 if (params.method === 'GET') {
-t.strictEqual(params.path, '/test/_refresh')
+t.equal(params.path, '/test/_refresh')
 return { body: { acknowledged: true } }
 } else {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test' } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 }
@@ -264,11 +264,11 @@ test('bulk index', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test', _id: count } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test', _id: count } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -308,7 +308,7 @@ test('bulk index', t => {

 t.test('Should perform a bulk request (retry)', async t => {
 async function handler (req, res) {
-t.strictEqual(req.url, '/_bulk')
+t.equal(req.url, '/_bulk')
 t.match(req.headers, { 'content-type': 'application/x-ndjson' })

 let body = ''
@@ -353,7 +353,7 @@ test('bulk index', t => {
 }
 },
 onDrop (doc) {
-t.deepEqual(doc, {
+t.same(doc, {
 status: 429,
 error: null,
 operation: { index: { _index: 'test' } },
@@ -402,7 +402,7 @@ test('bulk index', t => {
 }
 },
 onDrop (doc) {
-t.deepEqual(doc, {
+t.same(doc, {
 status: 429,
 error: null,
 operation: { index: { _index: 'test' } },
@@ -426,7 +426,7 @@ test('bulk index', t => {

 t.test('Should perform a bulk request (failure)', async t => {
 async function handler (req, res) {
-t.strictEqual(req.url, '/_bulk')
+t.equal(req.url, '/_bulk')
 t.match(req.headers, { 'content-type': 'application/x-ndjson' })

 let body = ''
@@ -471,7 +471,7 @@ test('bulk index', t => {
 }
 },
 onDrop (doc) {
-t.deepEqual(doc, {
+t.same(doc, {
 status: 400,
 error: { something: 'went wrong' },
 operation: { index: { _index: 'test' } },
@@ -525,7 +525,7 @@ test('bulk index', t => {
 await b
 t.fail('Should throw')
 } catch (err) {
-t.true(err instanceof errors.ResponseError)
+t.ok(err instanceof errors.ResponseError)
 }
 })

@@ -561,13 +561,13 @@ test('bulk index', t => {
 await b
 t.fail('Should throw')
 } catch (err) {
-t.true(err instanceof errors.ResponseError)
+t.ok(err instanceof errors.ResponseError)
 }
 })

 t.test('Should abort a bulk request', async t => {
 async function handler (req, res) {
-t.strictEqual(req.url, '/_bulk')
+t.equal(req.url, '/_bulk')
 t.match(req.headers, { 'content-type': 'application/x-ndjson' })

 let body = ''
@@ -653,8 +653,8 @@ test('bulk index', t => {
 }
 })
 .catch(err => {
-t.true(err instanceof errors.ConfigurationError)
-t.is(err.message, 'Bulk helper invalid action: \'foo\'')
+t.ok(err instanceof errors.ConfigurationError)
+t.equal(err.message, 'Bulk helper invalid action: \'foo\'')
 })
 })

@@ -666,11 +666,11 @@ test('bulk index', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test', _id: count } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test', _id: count } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -718,11 +718,11 @@ test('bulk index', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test' } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -774,11 +774,11 @@ test('bulk create', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { create: { _index: 'test', _id: count } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { create: { _index: 'test', _id: count } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -823,11 +823,11 @@ test('bulk update', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { update: { _index: 'test', _id: count } })
-t.deepEqual(JSON.parse(payload), { doc: dataset[count++], doc_as_upsert: true })
+t.same(JSON.parse(action), { update: { _index: 'test', _id: count } })
+t.same(JSON.parse(payload), { doc: dataset[count++], doc_as_upsert: true })
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -871,11 +871,11 @@ test('bulk update', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { update: { _index: 'test', _id: count } })
-t.deepEqual(JSON.parse(payload), { doc: dataset[count++] })
+t.same(JSON.parse(action), { update: { _index: 'test', _id: count } })
+t.same(JSON.parse(payload), { doc: dataset[count++] })
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -921,9 +921,9 @@ test('bulk delete', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
-t.deepEqual(JSON.parse(params.body), { delete: { _index: 'test', _id: count++ } })
+t.same(JSON.parse(params.body), { delete: { _index: 'test', _id: count++ } })
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -963,7 +963,7 @@ test('bulk delete', t => {

 t.test('Should perform a bulk request (failure)', async t => {
 async function handler (req, res) {
-t.strictEqual(req.url, '/_bulk')
+t.equal(req.url, '/_bulk')
 t.match(req.headers, { 'content-type': 'application/x-ndjson' })

 let body = ''
@@ -1011,7 +1011,7 @@ test('bulk delete', t => {
 }
 },
 onDrop (doc) {
-t.deepEqual(doc, {
+t.same(doc, {
 status: 400,
 error: { something: 'went wrong' },
 operation: { delete: { _index: 'test', _id: 1 } },
@@ -1051,7 +1051,7 @@ test('transport options', t => {
 return { body: { errors: false, items: [{}] } }
 }

-t.strictEqual(params.path, '/_all/_refresh')
+t.equal(params.path, '/_all/_refresh')
 t.match(params.headers, {
 foo: 'bar'
 })
@@ -1081,7 +1081,7 @@ test('transport options', t => {
 }
 })

-t.strictEqual(count, 4) // three bulk requests, one refresh
+t.equal(count, 4) // three bulk requests, one refresh
 t.type(result.time, 'number')
 t.type(result.bytes, 'number')
 t.match(result, {
@@ -1111,8 +1111,8 @@ test('errors', t => {
 }
 })
 } catch (err) {
-t.true(err instanceof errors.ConfigurationError)
-t.is(err.message, 'bulk helper: the datasource must be an array or a buffer or a readable stream or an async generator')
+t.ok(err instanceof errors.ConfigurationError)
+t.equal(err.message, 'bulk helper: the datasource must be an array or a buffer or a readable stream or an async generator')
 }
 })

@@ -1129,8 +1129,8 @@ test('errors', t => {
 }
 })
 } catch (err) {
-t.true(err instanceof errors.ConfigurationError)
-t.is(err.message, 'bulk helper: the datasource is required')
+t.ok(err instanceof errors.ConfigurationError)
+t.equal(err.message, 'bulk helper: the datasource is required')
 }
 })

@@ -1143,8 +1143,8 @@ test('errors', t => {
 datasource: dataset.slice()
 })
 } catch (err) {
-t.true(err instanceof errors.ConfigurationError)
-t.is(err.message, 'bulk helper: the onDocument callback is required')
+t.ok(err instanceof errors.ConfigurationError)
+t.equal(err.message, 'bulk helper: the onDocument callback is required')
 }
 })

@@ -1159,11 +1159,11 @@ test('Flush interval', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.path, '/_bulk')
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test' } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -1210,12 +1210,12 @@ test('Flush interval', t => {
 let count = 0
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.true(count < 2)
-t.strictEqual(params.path, '/_bulk')
+t.ok(count < 2)
+t.equal(params.path, '/_bulk')
 t.match(params.headers, { 'content-type': 'application/x-ndjson' })
 const [action, payload] = params.body.split('\n')
-t.deepEqual(JSON.parse(action), { index: { _index: 'test' } })
-t.deepEqual(JSON.parse(payload), dataset[count++])
+t.same(JSON.parse(action), { index: { _index: 'test' } })
+t.same(JSON.parse(payload), dataset[count++])
 return { body: { errors: false, items: [{}] } }
 }
 })
@@ -56,7 +56,7 @@ test('Basic', async t => {
 { query: { match: { foo: 'bar' } } }
 )

-t.deepEqual(result.body, {
+t.same(result.body, {
 status: 200,
 hits: {
 hits: [
@@ -67,7 +67,7 @@ test('Basic', async t => {
 }
 })

-t.deepEqual(result.documents, [
+t.same(result.documents, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }
@@ -116,7 +116,7 @@ test('Multiple searches (inside async iterator)', t => {

 m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
 t.error(err)
-t.deepEqual(result.body, {
+t.same(result.body, {
 status: 200,
 hits: {
 hits: [
@@ -127,7 +127,7 @@ test('Multiple searches (inside async iterator)', t => {
 }
 })

-t.deepEqual(result.documents, [
+t.same(result.documents, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }
@@ -136,7 +136,7 @@ test('Multiple searches (inside async iterator)', t => {

 m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
 t.error(err)
-t.deepEqual(result.body, {
+t.same(result.body, {
 status: 200,
 hits: {
 hits: [
@@ -147,7 +147,7 @@ test('Multiple searches (inside async iterator)', t => {
 }
 })

-t.deepEqual(result.documents, [
+t.same(result.documents, [
 { four: 'four' },
 { five: 'five' },
 { six: 'six' }
@@ -197,7 +197,7 @@ test('Multiple searches (async iterator exits)', t => {

 m.search({ index: 'test' }, { query: {} }, (err, result) => {
 t.error(err)
-t.deepEqual(result.body, {
+t.same(result.body, {
 status: 200,
 hits: {
 hits: [
@@ -208,7 +208,7 @@ test('Multiple searches (async iterator exits)', t => {
 }
 })

-t.deepEqual(result.documents, [
+t.same(result.documents, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }
@@ -217,7 +217,7 @@ test('Multiple searches (async iterator exits)', t => {

 m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
 t.error(err)
-t.deepEqual(result.body, {
+t.same(result.body, {
 status: 200,
 hits: {
 hits: [
@@ -228,7 +228,7 @@ test('Multiple searches (async iterator exits)', t => {
 }
 })

-t.deepEqual(result.documents, [
+t.same(result.documents, [
 { four: 'four' },
 { five: 'five' },
 { six: 'six' }
@@ -260,7 +260,7 @@ test('Stop a msearch processor (promises)', async t => {
 { query: { match: { foo: 'bar' } } }
 )
 } catch (err) {
-t.strictEqual(err.message, 'The msearch processor has been stopped')
+t.equal(err.message, 'The msearch processor has been stopped')
 }

 t.teardown(() => m.stop())
@@ -285,7 +285,7 @@ test('Stop a msearch processor (callbacks)', t => {
 m.stop()

 m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
-t.strictEqual(err.message, 'The msearch processor has been stopped')
+t.equal(err.message, 'The msearch processor has been stopped')
 })
 })

@@ -306,12 +306,12 @@ test('Bad header', t => {
 const m = client.helpers.msearch()

 m.search(null, { query: { match: { foo: 'bar' } } }, (err, result) => {
-t.strictEqual(err.message, 'The header should be an object')
+t.equal(err.message, 'The header should be an object')
 })

 m.search(null, { query: { match: { foo: 'bar' } } })
 .catch(err => {
-t.strictEqual(err.message, 'The header should be an object')
+t.equal(err.message, 'The header should be an object')
 })

 t.teardown(() => m.stop())
@@ -334,12 +334,12 @@ test('Bad body', t => {
 const m = client.helpers.msearch()

 m.search({ index: 'test' }, null, (err, result) => {
-t.strictEqual(err.message, 'The body should be an object')
+t.equal(err.message, 'The body should be an object')
 })

 m.search({ index: 'test' }, null)
 .catch(err => {
-t.strictEqual(err.message, 'The body should be an object')
+t.equal(err.message, 'The body should be an object')
 })

 t.teardown(() => m.stop())
@@ -389,7 +389,7 @@ test('Retry on 429', async t => {
 { query: { match: { foo: 'bar' } } }
 )

-t.deepEqual(result.body, {
+t.same(result.body, {
 status: 200,
 hits: {
 hits: [
@@ -400,7 +400,7 @@ test('Retry on 429', async t => {
 }
 })

-t.deepEqual(result.documents, [
+t.same(result.documents, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }
@@ -436,7 +436,7 @@ test('Single search errors', async t => {
 { query: { match: { foo: 'bar' } } }
 )
 } catch (err) {
-t.true(err instanceof errors.ResponseError)
+t.ok(err instanceof errors.ResponseError)
 }

 t.teardown(() => m.stop())
@@ -465,13 +465,13 @@ test('Entire msearch fails', t => {
 const m = client.helpers.msearch({ operations: 1 })

 m.search({ index: 'test' }, { query: {} }, (err, result) => {
-t.true(err instanceof errors.ResponseError)
-t.deepEqual(result.documents, [])
+t.ok(err instanceof errors.ResponseError)
+t.same(result.documents, [])
 })

 m.search({ index: 'test' }, { query: {} }, (err, result) => {
-t.true(err instanceof errors.ResponseError)
-t.deepEqual(result.documents, [])
+t.ok(err instanceof errors.ResponseError)
+t.same(result.documents, [])
 })

 t.teardown(() => m.stop())
@@ -523,13 +523,13 @@ test('Stop the msearch helper with an error', t => {

 m.then(
 () => t.fail('Should fail'),
-err => t.is(err.message, 'kaboom')
+err => t.equal(err.message, 'kaboom')
 )

-m.catch(err => t.is(err.message, 'kaboom'))
+m.catch(err => t.equal(err.message, 'kaboom'))

 m.search({ index: 'test' }, { query: {} }, (err, result) => {
-t.is(err.message, 'kaboom')
+t.equal(err.message, 'kaboom')
 })
 })

@@ -564,7 +564,7 @@ test('Multiple searches (concurrency = 1)', t => {

 m.search({ index: 'test' }, { query: {} }, (err, result) => {
 t.error(err)
-t.deepEqual(result.body, {
+t.same(result.body, {
 status: 200,
 hits: {
 hits: [
@@ -575,7 +575,7 @@ test('Multiple searches (concurrency = 1)', t => {
 }
 })

-t.deepEqual(result.documents, [
+t.same(result.documents, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }
@@ -584,7 +584,7 @@ test('Multiple searches (concurrency = 1)', t => {

 m.search({ index: 'test' }, { query: {} }, (err, result) => {
 t.error(err)
-t.deepEqual(result.body, {
+t.same(result.body, {
 status: 200,
 hits: {
 hits: [
@@ -595,7 +595,7 @@ test('Multiple searches (concurrency = 1)', t => {
 }
 })

-t.deepEqual(result.documents, [
+t.same(result.documents, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }
@@ -647,12 +647,12 @@ test('Flush interval', t => {

 m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
 t.error(err)
-t.is(result.documents.length, 3)
+t.equal(result.documents.length, 3)
 })

 m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
 t.error(err)
-t.is(result.documents.length, 3)
+t.equal(result.documents.length, 3)
 })

 setImmediate(clock.next)
@@ -691,7 +691,7 @@ test('Flush interval - early stop', t => {

 m.search({ index: 'test' }, { query: { match: { foo: 'bar' } } }, (err, result) => {
 t.error(err)
-t.is(result.documents.length, 3)
+t.equal(result.documents.length, 3)
 })

 setImmediate(() => {
@@ -750,11 +750,11 @@ test('Stop should resolve the helper (error)', t => {
 setImmediate(m.stop, new Error('kaboom'))

 m.then(() => t.fail('Should not fail'))
-.catch(err => t.is(err.message, 'kaboom'))
+.catch(err => t.equal(err.message, 'kaboom'))

-m.catch(err => t.is(err.message, 'kaboom'))
+m.catch(err => t.equal(err.message, 'kaboom'))

-m.then(() => t.fail('Should not fail'), err => t.is(err.message, 'kaboom'))
+m.then(() => t.fail('Should not fail'), err => t.equal(err.message, 'kaboom'))
 })

 test('Should use req options', async t => {
@@ -38,11 +38,11 @@ test('Scroll search', async t => {

 count += 1
 if (params.method === 'POST') {
-t.strictEqual(params.querystring, 'scroll=1m')
+t.equal(params.querystring, 'scroll=1m')
 }
 if (count === 4) {
 // final automated clear
-t.strictEqual(params.method, 'DELETE')
+t.equal(params.method, 'DELETE')
 }
 return {
 body: {
@@ -73,8 +73,8 @@ test('Scroll search', async t => {
 })

 for await (const result of scrollSearch) {
-t.strictEqual(result.body.count, count)
-t.strictEqual(result.body._scroll_id, 'id')
+t.equal(result.body.count, count)
+t.equal(result.body._scroll_id, 'id')
 }
 })

@@ -87,7 +87,7 @@ test('Clear a scroll search', async t => {
 })
 if (params.method === 'DELETE') {
 const body = JSON.parse(params.body)
-t.strictEqual(body.scroll_id, 'id')
+t.equal(body.scroll_id, 'id')
 }
 return {
 body: {
@@ -120,7 +120,7 @@ test('Clear a scroll search', async t => {
 if (count === 2) {
 t.fail('The scroll search should be cleared')
 }
-t.strictEqual(result.body.count, count)
+t.equal(result.body.count, count)
 if (count === 1) {
 await result.clear()
 }
@@ -138,7 +138,7 @@ test('Scroll search (retry)', async t => {
 }
 if (count === 5) {
 // final automated clear
-t.strictEqual(params.method, 'DELETE')
+t.equal(params.method, 'DELETE')
 }
 return {
 statusCode: 200,
@@ -172,9 +172,9 @@ test('Scroll search (retry)', async t => {
 })

 for await (const result of scrollSearch) {
-t.strictEqual(result.body.count, count)
-t.notStrictEqual(result.body.count, 1)
-t.strictEqual(result.body._scroll_id, 'id')
+t.equal(result.body.count, count)
+t.not(result.body.count, 1)
+t.equal(result.body._scroll_id, 'id')
 }
 })

@@ -208,9 +208,9 @@ test('Scroll search (retry throws and maxRetries)', async t => {
 t.fail('we should not be here')
 }
 } catch (err) {
-t.true(err instanceof errors.ResponseError)
-t.strictEqual(err.statusCode, 429)
-t.strictEqual(count, expectedAttempts)
+t.ok(err instanceof errors.ResponseError)
+t.equal(err.statusCode, 429)
+t.equal(count, expectedAttempts)
 }
 })

@@ -222,7 +222,7 @@ test('Scroll search (retry throws later)', async t => {
 onRequest (params) {
 count += 1
 // filter_path should not be added if is not already present
-t.strictEqual(params.querystring, 'scroll=1m')
+t.equal(params.querystring, 'scroll=1m')
 if (count > 1) {
 return { body: {}, statusCode: 429 }
 }
@@ -258,12 +258,12 @@ test('Scroll search (retry throws later)', async t => {

 try {
 for await (const result of scrollSearch) { // eslint-disable-line
-t.strictEqual(result.body.count, count)
+t.equal(result.body.count, count)
 }
 } catch (err) {
-t.true(err instanceof errors.ResponseError)
-t.strictEqual(err.statusCode, 429)
-t.strictEqual(count, expectedAttempts)
+t.ok(err instanceof errors.ResponseError)
+t.equal(err.statusCode, 429)
+t.equal(count, expectedAttempts)
 }
 })

@@ -272,11 +272,11 @@ test('Scroll search documents', async t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
 if (count === 0) {
-t.strictEqual(params.querystring, 'filter_path=hits.hits._source%2C_scroll_id&scroll=1m')
+t.equal(params.querystring, 'filter_path=hits.hits._source%2C_scroll_id&scroll=1m')
 } else {
 if (params.method !== 'DELETE') {
-t.strictEqual(params.querystring, 'scroll=1m')
-t.strictEqual(params.body, '{"scroll_id":"id"}')
+t.equal(params.querystring, 'scroll=1m')
+t.equal(params.body, '{"scroll_id":"id"}')
 }
 }
 return {
@@ -309,7 +309,7 @@ test('Scroll search documents', async t => {

 let n = 1
 for await (const hit of scrollSearch) {
-t.deepEqual(hit, { val: n * count })
+t.same(hit, { val: n * count })
 n += 1
 if (n === 4) {
 count += 1
@@ -348,9 +348,9 @@ test('Should not retry if maxRetries = 0', async t => {
 t.fail('we should not be here')
 }
 } catch (err) {
-t.true(err instanceof errors.ResponseError)
-t.strictEqual(err.statusCode, 429)
-t.strictEqual(count, expectedAttempts)
+t.ok(err instanceof errors.ResponseError)
+t.equal(err.statusCode, 429)
+t.equal(count, expectedAttempts)
 }
 })

@@ -359,10 +359,10 @@ test('Fix querystring for scroll search', async t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
 if (count === 0) {
-t.strictEqual(params.querystring, 'size=1&scroll=1m')
+t.equal(params.querystring, 'size=1&scroll=1m')
 } else {
 if (params.method !== 'DELETE') {
-t.strictEqual(params.querystring, 'scroll=1m')
+t.equal(params.querystring, 'scroll=1m')
 }
 }
 return {
@@ -392,7 +392,7 @@ test('Fix querystring for scroll search', async t => {
 })

 for await (const response of scrollSearch) {
-t.strictEqual(response.body.hits.hits.length, 1)
+t.equal(response.body.hits.hits.length, 1)
 count += 1
 }
 })
@@ -26,7 +26,7 @@ const { connection } = require('../../utils')
 test('Search should have an additional documents property', async t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.querystring, 'filter_path=hits.hits._source')
+t.equal(params.querystring, 'filter_path=hits.hits._source')
 return {
 body: {
 hits: {
@@ -50,7 +50,7 @@ test('Search should have an additional documents property', async t => {
 index: 'test',
 body: { foo: 'bar' }
 })
-t.deepEqual(result, [
+t.same(result, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }
@@ -60,7 +60,7 @@ test('Search should have an additional documents property', async t => {
 test('kGetHits fallback', async t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.querystring, 'filter_path=hits.hits._source')
+t.equal(params.querystring, 'filter_path=hits.hits._source')
 return { body: {} }
 }
 })
@@ -74,13 +74,13 @@ test('kGetHits fallback', async t => {
 index: 'test',
 body: { foo: 'bar' }
 })
-t.deepEqual(result, [])
+t.same(result, [])
 })

 test('Merge filter paths (snake_case)', async t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.querystring, 'filter_path=foo%2Chits.hits._source')
+t.equal(params.querystring, 'filter_path=foo%2Chits.hits._source')
 return {
 body: {
 hits: {
@@ -105,7 +105,7 @@ test('Merge filter paths (snake_case)', async t => {
 filter_path: 'foo',
 body: { foo: 'bar' }
 })
-t.deepEqual(result, [
+t.same(result, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }
@@ -115,7 +115,7 @@ test('Merge filter paths (snake_case)', async t => {
 test('Merge filter paths (camelCase)', async t => {
 const MockConnection = connection.buildMockConnection({
 onRequest (params) {
-t.strictEqual(params.querystring, 'filter_path=foo%2Chits.hits._source')
+t.equal(params.querystring, 'filter_path=foo%2Chits.hits._source')
 return {
 body: {
 hits: {
@@ -140,7 +140,7 @@ test('Merge filter paths (camelCase)', async t => {
 filterPath: 'foo',
 body: { foo: 'bar' }
 })
-t.deepEqual(result, [
+t.same(result, [
 { one: 'one' },
 { two: 'two' },
 { three: 'three' }