Remove Node.js v8 support (#1402)

This commit is contained in:
Tomas Della Vedova
2021-02-19 08:27:20 +01:00
committed by GitHub
parent 7801b2cc13
commit ab5d8997b8
44 changed files with 258 additions and 302 deletions

View File

@ -6,7 +6,6 @@ NODE_JS_VERSION:
- 14
- 12
- 10
- 8
TEST_SUITE:
- free

View File

@ -40,31 +40,6 @@ jobs:
run: |
npm run test:types
test-node-v8:
name: Test
runs-on: ${{ matrix.os }}
strategy:
matrix:
node-version: [8.x]
os: [ubuntu-latest, windows-latest, macOS-latest]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Install
run: |
npm install
- name: Test
run: |
npm run test:node8
helpers-integration-test:
name: Helpers integration test
runs-on: ubuntu-latest

View File

@ -28,7 +28,7 @@ npm install @elastic/elasticsearch
### Node.js support
NOTE: The minimum supported version of Node.js is `v8`.
NOTE: The minimum supported version of Node.js is `v10`.
The client versioning follows the Elastic Stack versioning; this means that
major, minor, and patch releases are done following a precise schedule that

View File

@ -31,10 +31,10 @@ function handleError (err, callback) {
}
function snakeCaseKeys (acceptedQuerystring, snakeCase, querystring) {
var target = {}
var keys = Object.keys(querystring)
for (var i = 0, len = keys.length; i < len; i++) {
var key = keys[i]
const target = {}
const keys = Object.keys(querystring)
for (let i = 0, len = keys.length; i < len; i++) {
const key = keys[i]
target[snakeCase[key] || key] = querystring[key]
}
return target

View File

@ -57,7 +57,7 @@ async function run () {
})
// stream async iteration, available in Node.js ≥ 10
var payload = ''
let payload = ''
body.setEncoding('utf8')
for await (const chunk of body) {
payload += chunk
@ -65,7 +65,7 @@ async function run () {
console.log(JSON.parse(payload))
// classic stream callback style
var payload = ''
let payload = ''
body.setEncoding('utf8')
body.on('data', chunk => { payload += chunk })
body.on('error', console.log)
@ -101,4 +101,4 @@ fastify.post('/search/:index', async (req, reply) => {
})
fastify.listen(3000)
----
----

View File

@ -124,7 +124,7 @@ const client = new Client({ node: 'http://localhost:9200' })
// Scroll utility
async function * scrollSearch (params) {
var response = await client.search(params)
let response = await client.search(params)
while (true) {
const sourceHits = response.body.hits.hits
@ -190,4 +190,4 @@ async function run () {
}
run().catch(console.log)
----
----

View File

@ -58,7 +58,7 @@ async function run () {
const data = body.rows.map(row => {
const obj = {}
for (var i = 0; i < row.length; i++) {
for (let i = 0; i < row.length; i++) {
obj[body.columns[i].name] = row[i]
}
return obj

View File

@ -24,7 +24,7 @@ To learn more about the supported major versions, please refer to the
[[nodejs-support]]
=== Node.js support
NOTE: The minimum supported version of Node.js is `v8`.
NOTE: The minimum supported version of Node.js is `v10`.
The client versioning follows the {stack} versioning; this means that
major, minor, and patch releases are done following a precise schedule that

View File

@ -27,8 +27,7 @@ const debug = require('debug')('elasticsearch')
const Transport = require('./lib/Transport')
const Connection = require('./lib/Connection')
const { ConnectionPool, CloudConnectionPool } = require('./lib/pool')
// Helpers works only in Node.js >= 10
const Helpers = nodeMajor < 10 ? /* istanbul ignore next */ null : require('./lib/Helpers')
const Helpers = require('./lib/Helpers')
const Serializer = require('./lib/Serializer')
const errors = require('./lib/errors')
const { ConfigurationError } = errors
@ -48,15 +47,6 @@ const kEventEmitter = Symbol('elasticsearchjs-event-emitter')
const ESAPI = require('./api')
/* istanbul ignore next */
if (nodeMajor < 10) {
process.emitWarning('You are using a version of Node.js that is currently in EOL. ' +
'The support for this version will be dropped in 7.12. ' +
'Please refer to https://ela.st/nodejs-support for additional information.',
'DeprecationWarning'
)
}
/* istanbul ignore next */
if (nodeMajor >= 10 && nodeMajor < 12) {
process.emitWarning('You are using a version of Node.js that will reach EOL in April 2021. ' +
@ -189,16 +179,13 @@ class Client extends ESAPI {
context: options.context
})
/* istanbul ignore else */
if (Helpers !== null) {
this.helpers = new Helpers({
client: this,
maxRetries: options.maxRetries,
metaHeader: options.enableMetaHeader
? `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}`
: null
})
}
this.helpers = new Helpers({
client: this,
maxRetries: options.maxRetries,
metaHeader: options.enableMetaHeader
? `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}`
: null
})
}
get emit () {
@ -223,7 +210,7 @@ class Client extends ESAPI {
opts = {}
}
var [namespace, method] = name.split('.')
let [namespace, method] = name.split('.')
if (method == null) {
method = namespace
namespace = null

View File

@ -213,8 +213,8 @@ class Connection {
}
const paramsKeys = Object.keys(params)
for (var i = 0, len = paramsKeys.length; i < len; i++) {
var key = paramsKeys[i]
for (let i = 0, len = paramsKeys.length; i < len; i++) {
const key = paramsKeys[i]
if (key === 'path') {
request.pathname = resolve(request.pathname, params[key])
} else if (key === 'querystring' && !!params[key] === true) {

View File

@ -27,8 +27,9 @@ const { SerializationError, DeserializationError } = require('./errors')
class Serializer {
serialize (object) {
debug('Serializing', object)
let json
try {
var json = JSON.stringify(object)
json = JSON.stringify(object)
} catch (err) {
throw new SerializationError(err.message, object)
}
@ -37,8 +38,9 @@ class Serializer {
deserialize (json) {
debug('Deserializing', json)
let object
try {
var object = sjson.parse(json)
object = sjson.parse(json)
} catch (err) {
throw new DeserializationError(err.message, json)
}
@ -50,8 +52,8 @@ class Serializer {
if (Array.isArray(array) === false) {
throw new SerializationError('The argument provided is not an array')
}
var ndjson = ''
for (var i = 0, len = array.length; i < len; i++) {
let ndjson = ''
for (let i = 0, len = array.length; i < len; i++) {
if (typeof array[i] === 'string') {
ndjson += array[i] + '\n'
} else {
@ -67,8 +69,8 @@ class Serializer {
if (typeof object === 'string') return object
// arrays should be serialized as comma separated list
const keys = Object.keys(object)
for (var i = 0, len = keys.length; i < len; i++) {
var key = keys[i]
for (let i = 0, len = keys.length; i < len; i++) {
const key = keys[i]
// elasticsearch will complain for keys without a value
if (object[key] === undefined) {
delete object[key]

View File

@ -91,7 +91,7 @@ class Transport {
callback = options
options = {}
}
var p = null
let p = null
// promises support
if (callback === undefined) {
@ -147,9 +147,10 @@ class Transport {
// the size of the stream, we risk to take too much memory.
// Furthermore, copying the stream every time is a very expensive operation.
const maxRetries = isStream(params.body) || isStream(params.bulkBody)
? 0 : (typeof options.maxRetries === 'number' ? options.maxRetries : this.maxRetries)
? 0
: (typeof options.maxRetries === 'number' ? options.maxRetries : this.maxRetries)
const compression = options.compression !== undefined ? options.compression : this.compression
var request = { abort: noop }
let request = { abort: noop }
const transportReturn = {
then (onFulfilled, onRejected) {
return p.then(onFulfilled, onRejected)
@ -525,7 +526,7 @@ function defaultNodeFilter (node) {
}
function roundRobinSelector () {
var current = -1
let current = -1
return function _roundRobinSelector (connections) {
if (++current >= connections.length) {
current = 0
@ -540,8 +541,8 @@ function randomSelector (connections) {
}
function generateRequestId () {
var maxInt = 2147483647
var nextReqId = 0
const maxInt = 2147483647
let nextReqId = 0
return function genReqId (params, options) {
return (nextReqId = (nextReqId + 1) & maxInt)
}

View File

@ -128,7 +128,7 @@ class BaseConnectionPool {
*/
empty (callback) {
debug('Emptying the connection pool')
var openConnections = this.size
let openConnections = this.size
this.connections.forEach(connection => {
connection.close(() => {
if (--openConnections === 0) {
@ -201,7 +201,7 @@ class BaseConnectionPool {
const ids = Object.keys(nodes)
const hosts = []
for (var i = 0, len = ids.length; i < len; i++) {
for (let i = 0, len = ids.length; i < len; i++) {
const node = nodes[ids[i]]
// If there is no protocol in
// the `publish_address` new URL will throw
@ -210,7 +210,7 @@ class BaseConnectionPool {
// - hostname/ip:port
// if we encounter the second case, we should
// use the hostname instead of the ip
var address = node.http.publish_address
let address = node.http.publish_address
const parts = address.split('/')
// the url is in the form of hostname/ip:port
if (parts.length > 1) {

View File

@ -80,7 +80,7 @@ class ConnectionPool extends BaseConnectionPool {
// list a node that no longer exist. The following check verify
// that the connection is still part of the pool before
// marking it as dead.
for (var i = 0; i < this.size; i++) {
for (let i = 0; i < this.size; i++) {
if (this.connections[i].id === id) {
this.dead.push(id)
break
@ -138,7 +138,7 @@ class ConnectionPool extends BaseConnectionPool {
path: '/',
timeout: this.pingTimeout
}, (err, response) => {
var isAlive = true
let isAlive = true
const statusCode = response !== null ? response.statusCode : 0
if (err != null ||
(statusCode === 502 || statusCode === 503 || statusCode === 504)) {
@ -170,8 +170,7 @@ class ConnectionPool extends BaseConnectionPool {
isAlive: true,
connection
})
// eslint-disable-next-line standard/no-callback-literal
callback(true, connection)
callback(true, connection) // eslint-disable-line
}
}
@ -199,7 +198,7 @@ class ConnectionPool extends BaseConnectionPool {
// TODO: can we cache this?
const connections = []
for (var i = 0; i < this.size; i++) {
for (let i = 0; i < this.size; i++) {
const connection = this.connections[i]
if (noAliveConnections || connection.status === Connection.statuses.ALIVE) {
if (filter(connection) === true) {

View File

@ -25,7 +25,6 @@
],
"scripts": {
"test": "npm run lint && tap test/{unit,acceptance}/{*,**/*}.test.js && npm run test:types",
"test:node8": "npm run lint && tap test/{unit,acceptance}/*.test.js",
"test:unit": "tap test/unit/{*,**/*}.test.js",
"test:acceptance": "tap test/acceptance/*.test.js",
"test:integration": "node test/integration/index.js",
@ -49,37 +48,37 @@
},
"devDependencies": {
"@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1",
"@types/node": "^12.6.2",
"@types/node": "^14.14.28",
"convert-hrtime": "^3.0.0",
"dedent": "^0.7.0",
"deepmerge": "^4.0.0",
"deepmerge": "^4.2.2",
"dezalgo": "^1.0.3",
"fast-deep-equal": "^3.1.1",
"into-stream": "^5.1.1",
"js-yaml": "^3.13.1",
"fast-deep-equal": "^3.1.3",
"into-stream": "^6.0.0",
"js-yaml": "^4.0.0",
"license-checker": "^25.0.1",
"minimist": "^1.2.0",
"ora": "^3.4.0",
"minimist": "^1.2.5",
"ora": "^5.3.0",
"pretty-hrtime": "^1.0.3",
"proxy": "^1.0.2",
"rimraf": "^2.6.3",
"semver": "^6.0.0",
"simple-git": "^1.110.0",
"simple-statistics": "^7.0.2",
"split2": "^3.1.1",
"standard": "^13.0.2",
"rimraf": "^3.0.2",
"semver": "^7.3.4",
"simple-git": "^2.35.0",
"simple-statistics": "^7.4.1",
"split2": "^3.2.2",
"standard": "^16.0.3",
"stoppable": "^1.1.0",
"tap": "^14.4.1",
"tsd": "^0.13.1",
"workq": "^2.1.0",
"xmlbuilder2": "^2.1.2"
"tap": "^14.11.0",
"tsd": "^0.14.0",
"workq": "^3.0.0",
"xmlbuilder2": "^2.4.0"
},
"dependencies": {
"debug": "^4.1.1",
"debug": "^4.3.1",
"hpagent": "^0.1.1",
"ms": "^2.1.1",
"ms": "^2.1.3",
"pump": "^3.0.0",
"secure-json-parse": "^2.1.0"
"secure-json-parse": "^2.3.1"
},
"license": "Apache-2.0",
"repository": {
@ -90,11 +89,16 @@
"url": "https://github.com/elastic/elasticsearch-js/issues"
},
"engines": {
"node": ">=8"
"node": ">=10"
},
"tsd": {
"directory": "test/types"
},
"standard": {
"ignore": [
"/api"
]
},
"tap": {
"esm": false,
"ts": false,
@ -103,4 +107,4 @@
"coverage": false,
"jobs-auto": true
}
}
}

View File

@ -126,14 +126,14 @@ function generate () {
}
function generateAsciidoc (source) {
var asciidoc = '// This file is autogenerated, DO NOT EDIT\n'
let asciidoc = '// This file is autogenerated, DO NOT EDIT\n'
asciidoc += '// Use `node scripts/generate-docs-examples.js` to generate the docs examples\n\n'
var code = 'async function run (client) {\n// START\n'
let code = 'async function run (client) {\n// START\n'
for (var i = 0; i < source.length; i++) {
for (let i = 0; i < source.length; i++) {
const { api, query, params, body } = source[i]
const apiArguments = Object.assign({}, params, query, body ? { body } : body)
var serializedApiArguments = Object.keys(apiArguments).length > 0
const serializedApiArguments = Object.keys(apiArguments).length > 0
? JSON.stringify(apiArguments, null, 2)
: ''
code += `const response${getResponsePostfix(i)} = await client.${api.replace(/_([a-z])/g, g => g[1].toUpperCase())}(${serializedApiArguments})

View File

@ -42,8 +42,8 @@ function cloneAndCheckout (opts, callback) {
* @param {function} callback
*/
function withTag (tag, callback) {
var fresh = false
var retry = 0
let fresh = false
let retry = 0
if (!pathExist(esFolder)) {
if (!createFolder(esFolder)) {

View File

@ -206,7 +206,7 @@ function generateSingleApi (version, spec, common) {
// get the required parts from the url
// if the url has at least one static path,
// then there are not required parts of the url
var allParts = []
let allParts = []
for (const path of paths) {
if (path.parts) {
allParts.push(Object.keys(path.parts))
@ -250,10 +250,10 @@ function generateSingleApi (version, spec, common) {
${genUrlValidation(paths, api)}
var { ${genQueryBlacklist(false)}, ...querystring } = params
let { ${genQueryBlacklist(false)}, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
let path = ''
${buildPath(api)}
// build request object
@ -289,7 +289,7 @@ function generateSingleApi (version, spec, common) {
return code.join('\n ')
function _genRequiredCheck (param) {
var camelCased = param[0] === '_'
const camelCased = param[0] === '_'
? '_' + param.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
: param.replace(/_([a-z])/g, k => k[1].toUpperCase())
@ -386,7 +386,7 @@ function generateSingleApi (version, spec, common) {
return path.length > 0 ? ('\'/\' + ' + path) : '\'/\''
}
var hasStaticPath = false
let hasStaticPath = false
const sortedPaths = paths
// some legacy APIs have multiple static paths
// this filter removes them
@ -401,8 +401,8 @@ function generateSingleApi (version, spec, common) {
// sort by number of parameters (desc)
.sort((a, b) => Object.keys(b.parts || {}).length - Object.keys(a.parts || {}).length)
var code = ''
for (var i = 0; i < sortedPaths.length; i++) {
let code = ''
for (let i = 0; i < sortedPaths.length; i++) {
const { path, methods } = sortedPaths[i]
if (sortedPaths.length === 1) {
code += `if (method == null) method = ${generatePickMethod(methods)}
@ -454,9 +454,9 @@ function genBody (api, methods, body) {
return 'bulkBody: body,'
}
if (body === null && bodyMethod) {
return `body: '',`
return 'body: \'\','
} else if (bodyMethod) {
return `body: body || '',`
return 'body: body || \'\','
} else {
return 'body: null,'
}
@ -490,13 +490,13 @@ function genUrlValidation (paths, api) {
.map(s => s.slice(1, -1))
.reverse()
var code = ''
let code = ''
const len = chunks.length
chunks.forEach((chunk, index) => {
if (index === len - 1) return
var params = []
var camelCased = chunk[0] === '_'
const params = []
let camelCased = chunk[0] === '_'
? '_' + chunk.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
: chunk.replace(/_([a-z])/g, k => k[1].toUpperCase())
@ -505,7 +505,7 @@ function genUrlValidation (paths, api) {
} else {
code += `${index ? '} else ' : ''}if ((params['${chunk}'] != null || params['${camelCased}'] != null) && (`
}
for (var i = index + 1; i < len; i++) {
for (let i = index + 1; i < len; i++) {
params.push(chunks[i])
// url parts can be declared in camelCase fashion
camelCased = chunks[i][0] === '_'
@ -541,7 +541,7 @@ function generateDocumentation ({ documentation }, op) {
if (documentation == null) return ''
var doc = '/**\n'
let doc = '/**\n'
doc += ` * Perform a ${op} request\n`
if (documentation.description) {
doc += ` * ${documentation.description.replace(/\u00A0/g, ' ')}\n`

View File

@ -28,7 +28,7 @@ const codeExamples = readdirSync(join(__dirname, '..', '..', 'docs', 'examples')
.filter(api => api !== 'index')
function generateDocs (common, spec) {
var doc = dedent`
let doc = dedent`
[[api-reference]]
////////
@ -110,7 +110,7 @@ function generateDocs (common, spec) {
}
function commonParameters (spec) {
var doc = dedent`
let doc = dedent`
[discrete]
=== Common parameters
Parameters that are accepted by all API endpoints.
@ -196,7 +196,7 @@ function generateApiDoc (spec) {
const codeParameters = params
.reduce((acc, val) => {
var code = `${val.name}: ${val.type},`
const code = `${val.name}: ${val.type},`
acc += acc === ''
? code
: '\n ' + code
@ -210,7 +210,7 @@ function generateApiDoc (spec) {
? ''
: `*Stability:* ${spec[name].stability}`
var doc = dedent`
let doc = dedent`
[discrete]
=== ${camelify(name)}
${stability}
@ -239,7 +239,7 @@ function generateApiDoc (spec) {
acc += ` +\n_Default:_ ${'`' + val.default + '`'}`
}
if (val.deprecated) {
acc += ` +\n\nWARNING: This parameter has been deprecated.`
acc += ' +\n\nWARNING: This parameter has been deprecated.'
}
return acc + '\n\n'
}, '')

View File

@ -235,44 +235,44 @@ function buildMethodDefinition (opts, api, name, hasBody, isHead) {
if (opts.kibana) {
if (hasBody) {
return [
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` }
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' }
]
} else {
return [
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` }
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' }
]
}
}
if (hasBody) {
let methods = [
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' },
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
{ key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' }
]
if (isSnakeCased(api)) {
methods = methods.concat([
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' },
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
{ key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' }
])
}
return methods
} else {
let methods = [
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' },
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
{ key: `${api}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' }
]
if (isSnakeCased(api)) {
methods = methods.concat([
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise<ApiResponse<TResponse, TContext>>` },
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` },
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: `TransportRequestCallback` }
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' },
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
{ key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' }
])
}
return methods

View File

@ -33,7 +33,7 @@ const ndjsonApiKey = ndjsonApi
function generate (version, api) {
const release = semver.valid(version) ? semver.major(version) : version
var types = `/*
let types = `/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
@ -79,8 +79,8 @@ export interface Generic {
// get the required parts from the url
// if the url has at least one static path,
// then there are not required parts of the url
var allParts = []
var requiredParts = []
let allParts = []
let requiredParts = []
for (const path of paths) {
if (path.parts) {
allParts.push(Object.keys(path.parts))
@ -146,7 +146,7 @@ export interface ${toPascalCase(name)}${body ? `<T = ${bodyGeneric}>` : ''} exte
case 'time':
case 'timeout':
return 'string'
case 'enum':
case 'enum': {
// the following code changes 'true' | 'false' to boolean
let foundTrue = false
let foundFalse = false
@ -172,6 +172,7 @@ export interface ${toPascalCase(name)}${body ? `<T = ${bodyGeneric}>` : ''} exte
options.push('boolean')
}
return options.join(' | ')
}
case 'int':
case 'double':
case 'long':

View File

@ -16,7 +16,7 @@ test('Request id', t => {
const genReqId = generateRequestId()
t.type(genReqId, 'function')
for (var i = 1; i <= 10; i++) {
for (let i = 1; i <= 10; i++) {
t.strictEqual(genReqId(), i)
}

View File

@ -95,7 +95,7 @@ test('Resurrect a node and handle 502/3/4 status code', t => {
const clock = FakeTimers.install({ toFake: ['Date'] })
const q = workq()
var count = 0
let count = 0
function handler (req, res) {
res.statusCode = count++ < 2 ? 502 : 200
res.setHeader('content-type', 'application/json')
@ -114,7 +114,7 @@ test('Resurrect a node and handle 502/3/4 status code', t => {
maxRetries: 0
})
var idCount = 2
let idCount = 2
client.on(events.RESURRECT, (err, meta) => {
t.error(err)
t.strictEqual(meta.strategy, 'ping')

View File

@ -59,7 +59,7 @@ test('Should update the connection pool', t => {
t.strictEqual(hosts.length, 4)
const ids = Object.keys(nodes)
for (var i = 0; i < hosts.length; i++) {
for (let i = 0; i < hosts.length; i++) {
const id = ids[i]
// the first node will be an update of the existing one
if (id === 'node0') {
@ -118,7 +118,7 @@ test('Should handle hostnames in publish_address', t => {
t.error(err)
t.strictEqual(hosts.length, 4)
for (var i = 0; i < hosts.length; i++) {
for (let i = 0; i < hosts.length; i++) {
// the first node will be an update of the existing one
t.strictEqual(hosts[i].url.hostname, 'localhost')
}

View File

@ -16,7 +16,7 @@ const { bench, beforeEach, afterEach } = require('../suite')({
}
})
var stackoverflow = []
let stackoverflow = []
const stackoverflowPath = join(
__dirname,
'fixtures',
@ -53,7 +53,7 @@ bench('Bulk index documents', {
action: 'bulk'
}, async b => {
b.start()
for (var i = 0; i < stackoverflow.length; i++) {
for (let i = 0; i < stackoverflow.length; i++) {
await b.client.bulk({ body: stackoverflow[i] })
}
b.end()
@ -67,7 +67,7 @@ bench('Complex search request', {
action: 'search'
}, async b => {
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.search({
index: INDEX,
body: {
@ -81,8 +81,8 @@ bench('Complex search request', {
})
function readSOfile () {
var i = 0
var stackoverflow = []
let i = 0
const stackoverflow = []
return new Promise((resolve, reject) => {
createReadStream(stackoverflowPath)
.pipe(split(JSON.parse))

View File

@ -44,7 +44,7 @@ bench('Ping', {
action: 'ping'
}, async b => {
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.ping()
}
b.end()
@ -57,7 +57,7 @@ bench('Create index', {
action: 'indices.create'
}, async b => {
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.indices.create({ index: `test-create-${i}` })
}
b.end()
@ -75,7 +75,7 @@ bench('Index small document', {
await b.client.indices.create({ index })
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.create({
index,
type: '_doc',
@ -98,7 +98,7 @@ bench('Index large document', {
await b.client.indices.create({ index })
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.create({
index,
type: '_doc',
@ -128,7 +128,7 @@ bench('Get small document', {
})
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.get({
index,
type: '_doc',
@ -157,7 +157,7 @@ bench('Get large document', {
})
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.get({
index,
type: '_doc',
@ -187,7 +187,7 @@ bench('Search small document', {
})
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.search({
index,
type: '_doc',
@ -221,7 +221,7 @@ bench('Search large document', {
})
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.search({
index,
type: '_doc',
@ -255,7 +255,7 @@ bench('Update small document', {
})
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await b.client.update({
index,
type: '_doc',

View File

@ -12,7 +12,7 @@ const { connection } = require('../../utils')
bench('Initialization', { warmup: 5, measure: 10, iterations: 1000 }, async b => {
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
const client = new Client({ // eslint-disable-line
node: 'http://localhost:9200'
})
@ -56,7 +56,7 @@ bench('Basic get', { warmup: 5, measure: 10, iterations: 1000 }, async b => {
q: 'foo:bar'
})
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await client.search({
index: 'test',
type: 'doc',
@ -83,7 +83,7 @@ bench('Basic post', { warmup: 5, measure: 10, iterations: 1000 }, async b => {
}
})
b.start()
for (var i = 0; i < b.iterations; i++) {
for (let i = 0; i < b.iterations; i++) {
await client.search({
index: 'test',
type: 'doc',

View File

@ -15,10 +15,10 @@ function buildBenchmark (options = {}) {
const q = workq()
const stats = {}
const reports = []
var beforeEach = null
var afterEach = null
var setup = null
var teardown = null
let beforeEach = null
let afterEach = null
let setup = null
let teardown = null
function setBeforeEach (fn) {
beforeEach = fn
@ -54,7 +54,7 @@ function buildBenchmark (options = {}) {
}
stats[title] = []
var { measure, warmup } = opts
let { measure, warmup } = opts
const b = new B({ iterations: opts.iterations })
q.add(runSetup)
@ -225,7 +225,7 @@ function buildBenchmark (options = {}) {
}
})
for (var i = 0; i < results.length; i++) {
for (let i = 0; i < results.length; i++) {
await client.index({
index: 'benchmarking_results',
type: '_doc',

View File

@ -65,7 +65,7 @@ test('search helper', async t => {
}
})
var count = 0
let count = 0
for await (const search of scrollSearch) {
count += 1
for (const doc of search.documents) {
@ -87,7 +87,7 @@ test('clear a scroll search', async t => {
}
})
var count = 0
let count = 0
for await (const search of scrollSearch) {
count += 1
if (count === 2) {
@ -109,7 +109,7 @@ test('scroll documents', async t => {
}
})
var count = 0
let count = 0
for await (const doc of scrollSearch) {
count += 1
t.true(doc.title.toLowerCase().includes('javascript'))

View File

@ -179,8 +179,8 @@ async function start ({ client, isXPack }) {
// to provide a better test log output
.reduce((arr, file) => {
const path = file.slice(file.indexOf('/rest-api-spec/test'), file.lastIndexOf('/'))
var inserted = false
for (var i = 0; i < arr.length; i++) {
let inserted = false
for (let i = 0; i < arr.length; i++) {
if (arr[i][0].includes(path)) {
inserted = true
arr[i].push(file)
@ -223,8 +223,8 @@ async function start ({ client, isXPack }) {
.filter(Boolean)
// get setup and teardown if present
var setupTest = null
var teardownTest = null
let setupTest = null
let teardownTest = null
for (const test of tests) {
if (test.setup) setupTest = test.setup
if (test.teardown) teardownTest = test.teardown
@ -299,14 +299,15 @@ function log (text) {
}
function now () {
var ts = process.hrtime()
const ts = process.hrtime()
return (ts[0] * 1e3) + (ts[1] / 1e6)
}
function parse (data) {
const schema = yaml.Schema.create(yaml.CORE_SCHEMA, [])
let doc
try {
var doc = yaml.safeLoad(data, { schema })
doc = yaml.safeLoad(data, { schema })
} catch (err) {
console.error(err)
return
@ -329,8 +330,8 @@ function withSHA (sha) {
})
function _withSHA (callback) {
var fresh = false
var retry = 0
let fresh = false
let retry = 0
if (!pathExist(esFolder)) {
if (!createFolder(esFolder)) {
@ -427,10 +428,10 @@ if (require.main === module) {
}
const shouldSkip = (isXPack, file, name) => {
var list = Object.keys(freeSkips)
for (var i = 0; i < list.length; i++) {
let list = Object.keys(freeSkips)
for (let i = 0; i < list.length; i++) {
const freeTest = freeSkips[list[i]]
for (var j = 0; j < freeTest.length; j++) {
for (let j = 0; j < freeTest.length; j++) {
if (file.endsWith(list[i]) && (name === freeTest[j] || freeTest[j] === '*')) {
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
log(`Skipping test ${testName} because is blacklisted in the free test`)
@ -441,9 +442,9 @@ const shouldSkip = (isXPack, file, name) => {
if (file.includes('x-pack') || isXPack) {
list = Object.keys(platinumBlackList)
for (i = 0; i < list.length; i++) {
for (let i = 0; i < list.length; i++) {
const platTest = platinumBlackList[list[i]]
for (j = 0; j < platTest.length; j++) {
for (let j = 0; j < platTest.length; j++) {
if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) {
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
log(`Skipping test ${testName} because is blacklisted in the platinum test`)

View File

@ -320,7 +320,7 @@ function build (opts = {}) {
*/
function set (key, name) {
if (key.includes('_arbitrary_key_')) {
var currentVisit = null
let currentVisit = null
for (const path of key.split('.')) {
if (path === '_arbitrary_key_') {
const keys = Object.keys(currentVisit)
@ -361,8 +361,9 @@ function build (opts = {}) {
*/
async function doAction (action, stats) {
const cmd = parseDo(action)
let api
try {
var api = delve(client, cmd.method).bind(client)
api = delve(client, cmd.method).bind(client)
} catch (err) {
console.error(`\nError: Cannot find the method '${cmd.method}' in the client.\n`)
process.exit(1)
@ -373,8 +374,8 @@ function build (opts = {}) {
if (cmd.params.ignore) delete cmd.params.ignore
const [err, result] = await to(api(cmd.params, options))
var warnings = result ? result.warnings : null
var body = result ? result.body : null
let warnings = result ? result.warnings : null
const body = result ? result.body : null
if (action.warnings && warnings === null) {
assert.fail('We should get a warning header', action.warnings)
@ -719,7 +720,7 @@ function parseDo (action) {
for (const key in obj) {
const val = obj[key]
var newKey = key
let newKey = key
if (!~doNotCamelify.indexOf(key)) {
// if the key starts with `_` we should not camelify the first occurence
// eg: _source_include => _sourceInclude
@ -776,7 +777,7 @@ function parseDoError (err, spec) {
function getSkip (arr) {
if (!Array.isArray(arr)) return null
for (var i = 0; i < arr.length; i++) {
for (let i = 0; i < arr.length; i++) {
if (arr[i].skip) return arr[i].skip
}
return null
@ -822,7 +823,7 @@ function logSkip (action) {
* @returns {boolean}
*/
function shouldSkip (esVersion, action) {
var shouldSkip = false
let shouldSkip = false
// skip based on the version
if (action.version) {
if (action.version.trim() === 'all') return true

View File

@ -63,7 +63,7 @@ test('API', t => {
t.test('markDead', t => {
const pool = new BaseConnectionPool({ Connection, sniffEnabled: true })
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
let connection = pool.addConnection(href)
t.same(pool.markDead(connection), pool)
connection = pool.connections.find(c => c.id === href)
t.strictEqual(connection.status, Connection.statuses.ALIVE)
@ -73,7 +73,7 @@ test('API', t => {
t.test('markAlive', t => {
const pool = new BaseConnectionPool({ Connection, sniffEnabled: true })
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
let connection = pool.addConnection(href)
t.same(pool.markAlive(connection), pool)
connection = pool.connections.find(c => c.id === href)
t.strictEqual(connection.status, Connection.statuses.ALIVE)
@ -96,7 +96,7 @@ test('API', t => {
t.test('removeConnection', t => {
const pool = new BaseConnectionPool({ Connection })
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
const connection = pool.addConnection(href)
pool.removeConnection(connection)
t.strictEqual(pool.size, 0)
t.end()

View File

@ -29,7 +29,7 @@ const {
test('Should create a child client (headers check)', t => {
t.plan(4)
var count = 0
let count = 0
function handler (req, res) {
if (count++ === 0) {
t.match(req.headers, { 'x-foo': 'bar' })
@ -216,12 +216,12 @@ test('Should create a child client (generateRequestId check)', t => {
t.plan(6)
function generateRequestId1 () {
var id = 0
let id = 0
return () => `trace-1-${id++}`
}
function generateRequestId2 () {
var id = 0
let id = 0
return () => `trace-2-${id++}`
}
@ -235,7 +235,7 @@ test('Should create a child client (generateRequestId check)', t => {
generateRequestId: generateRequestId2()
})
var count = 0
let count = 0
client.on('request', (err, { meta }) => {
t.error(err)
t.strictEqual(
@ -266,7 +266,7 @@ test('Should create a child client (name check)', t => {
t.strictEqual(client.name, 'parent')
t.strictEqual(child.name, 'child')
var count = 0
let count = 0
client.on('request', (err, { meta }) => {
t.error(err)
t.strictEqual(
@ -284,7 +284,7 @@ test('Should create a child client (name check)', t => {
test('Should create a child client (auth check)', t => {
t.plan(4)
var count = 0
let count = 0
function handler (req, res) {
if (count++ === 0) {
t.match(req.headers, { authorization: 'Basic Zm9vOmJhcg==' })

View File

@ -287,7 +287,7 @@ test('Authentication', t => {
t.test('Custom basic authentication per request', t => {
t.plan(6)
var first = true
let first = true
function handler (req, res) {
t.match(req.headers, {
authorization: first ? 'hello' : 'Basic Zm9vOmJhcg=='
@ -322,7 +322,7 @@ test('Authentication', t => {
t.test('Override default basic authentication per request', t => {
t.plan(6)
var first = true
let first = true
function handler (req, res) {
t.match(req.headers, {
authorization: first ? 'hello' : 'Basic Zm9vOmJhcg=='
@ -419,7 +419,7 @@ test('Authentication', t => {
t.test('Custom ApiKey authentication per request', t => {
t.plan(6)
var first = true
let first = true
function handler (req, res) {
t.match(req.headers, {
authorization: first ? 'ApiKey Zm9vOmJhcg==' : 'Basic Zm9vOmJhcg=='
@ -454,7 +454,7 @@ test('Authentication', t => {
t.test('Override default ApiKey authentication per request', t => {
t.plan(6)
var first = true
let first = true
function handler (req, res) {
t.match(req.headers, {
authorization: first ? 'hello' : 'ApiKey Zm9vOmJhcg=='
@ -1091,8 +1091,8 @@ test('Correctly handles the same header cased differently', t => {
t.plan(4)
function handler (req, res) {
t.strictEqual(req.headers['authorization'], 'Basic foobar')
t.strictEqual(req.headers['foo'], 'baz')
t.strictEqual(req.headers.authorization, 'Basic foobar')
t.strictEqual(req.headers.foo, 'baz')
res.setHeader('Content-Type', 'application/json;utf=8')
res.end(JSON.stringify({ hello: 'world' }))
}

View File

@ -66,7 +66,7 @@ test('API', t => {
t.test('markDead', t => {
const pool = new ConnectionPool({ Connection, sniffEnabled: true })
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
let connection = pool.addConnection(href)
pool.markDead(connection)
connection = pool.connections.find(c => c.id === href)
t.strictEqual(connection.deadCount, 1)
@ -100,7 +100,7 @@ test('API', t => {
t.test('markAlive', t => {
const pool = new ConnectionPool({ Connection, sniffEnabled: true })
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
let connection = pool.addConnection(href)
pool.markDead(connection)
pool.markAlive(connection)
connection = pool.connections.find(c => c.id === href)
@ -121,7 +121,7 @@ test('API', t => {
sniffEnabled: true
})
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
const connection = pool.addConnection(href)
pool.markDead(connection)
const opts = {
now: Date.now() + 1000 * 60 * 3,
@ -147,7 +147,7 @@ test('API', t => {
sniffEnabled: true
})
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
const connection = pool.addConnection(href)
pool.markDead(connection)
const opts = {
now: Date.now() + 1000 * 60 * 3,
@ -175,7 +175,7 @@ test('API', t => {
sniffEnabled: true
})
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
const connection = pool.addConnection(href)
pool.markDead(connection)
const opts = {
now: Date.now() + 1000 * 60 * 3,
@ -200,7 +200,7 @@ test('API', t => {
sniffEnabled: true
})
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
const connection = pool.addConnection(href)
pool.markDead(connection)
const opts = {
now: Date.now() + 1000 * 60 * 3,
@ -292,7 +292,7 @@ test('API', t => {
t.test('removeConnection', t => {
const pool = new ConnectionPool({ Connection })
const href = 'http://localhost:9200/'
var connection = pool.addConnection(href)
const connection = pool.addConnection(href)
t.ok(pool.getConnection() instanceof Connection)
pool.removeConnection(connection)
t.strictEqual(pool.getConnection(), null)

View File

@ -58,7 +58,7 @@ test('Basic (http)', t => {
connection: 'keep-alive'
})
var payload = ''
let payload = ''
res.setEncoding('utf8')
res.on('data', chunk => { payload += chunk })
res.on('error', err => t.fail(err))
@ -98,7 +98,7 @@ test('Basic (https)', t => {
connection: 'keep-alive'
})
var payload = ''
let payload = ''
res.setEncoding('utf8')
res.on('data', chunk => { payload += chunk })
res.on('error', err => t.fail(err))
@ -139,7 +139,7 @@ test('Basic (https with ssl agent)', t => {
connection: 'keep-alive'
})
var payload = ''
let payload = ''
res.setEncoding('utf8')
res.on('data', chunk => { payload += chunk })
res.on('error', err => t.fail(err))
@ -193,7 +193,7 @@ test('Custom http agent', t => {
connection: 'keep-alive'
})
var payload = ''
let payload = ''
res.setEncoding('utf8')
res.on('data', chunk => { payload += chunk })
res.on('error', err => t.fail(err))
@ -317,7 +317,7 @@ test('Body request', t => {
t.plan(2)
function handler (req, res) {
var payload = ''
let payload = ''
req.setEncoding('utf8')
req.on('data', chunk => { payload += chunk })
req.on('error', err => t.fail(err))
@ -346,7 +346,7 @@ test('Send body as buffer', t => {
t.plan(2)
function handler (req, res) {
var payload = ''
let payload = ''
req.setEncoding('utf8')
req.on('data', chunk => { payload += chunk })
req.on('error', err => t.fail(err))
@ -375,7 +375,7 @@ test('Send body as stream', t => {
t.plan(2)
function handler (req, res) {
var payload = ''
let payload = ''
req.setEncoding('utf8')
req.on('data', chunk => { payload += chunk })
req.on('error', err => t.fail(err))
@ -424,7 +424,7 @@ test('Should not close a connection if there are open requests', t => {
t.error(err)
t.strictEqual(connection._openRequests, 0)
var payload = ''
let payload = ''
res.setEncoding('utf8')
res.on('data', chunk => { payload += chunk })
res.on('error', err => t.fail(err))
@ -461,7 +461,7 @@ test('Should not close a connection if there are open requests (with agent disab
t.error(err)
t.strictEqual(connection._openRequests, 0)
var payload = ''
let payload = ''
res.setEncoding('utf8')
res.on('data', chunk => { payload += chunk })
res.on('error', err => t.fail(err))
@ -571,7 +571,7 @@ test('asStream set to true', t => {
}, (err, res) => {
t.error(err)
var payload = ''
let payload = ''
res.setEncoding('utf8')
res.on('data', chunk => { payload += chunk })
res.on('error', err => t.fail(err))
@ -887,7 +887,7 @@ test('Should correctly resolve request pathname', t => {
t.plan(1)
const connection = new Connection({
url: new URL(`http://localhost:80/test`)
url: new URL('http://localhost:80/test')
})
t.strictEqual(

View File

@ -20,7 +20,6 @@
'use strict'
const { test } = require('tap')
const semver = require('semver')
const { Client, events } = require('../../index')
const { TimeoutError } = require('../../lib/errors')
const {
@ -127,7 +126,7 @@ test('Should emit a request event once when a request is performed', t => {
})
})
test('Remove an event', { skip: semver.lt(process.versions.node, '10.0.0') }, t => {
test('Remove an event', t => {
t.plan(4)
const client = new Client({

View File

@ -23,7 +23,6 @@ const { createReadStream } = require('fs')
const { join } = require('path')
const split = require('split2')
const FakeTimers = require('@sinonjs/fake-timers')
const semver = require('semver')
const { test } = require('tap')
const { Client, errors } = require('../../../')
const { buildServer, connection } = require('../../utils')
@ -308,10 +307,6 @@ test('bulk index', t => {
})
t.test('Should perform a bulk request (retry)', async t => {
if (semver.lt(process.versions.node, '10.0.0')) {
t.skip('This test will not pass on Node v8')
return
}
async function handler (req, res) {
t.strictEqual(req.url, '/_bulk')
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
@ -430,10 +425,6 @@ test('bulk index', t => {
})
t.test('Should perform a bulk request (failure)', async t => {
if (semver.lt(process.versions.node, '10.0.0')) {
t.skip('This test will not pass on Node v8')
return
}
async function handler (req, res) {
t.strictEqual(req.url, '/_bulk')
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
@ -575,10 +566,6 @@ test('bulk index', t => {
})
t.test('Should abort a bulk request', async t => {
if (semver.lt(process.versions.node, '10.0.0')) {
t.skip('This test will not pass on Node v8')
return
}
async function handler (req, res) {
t.strictEqual(req.url, '/_bulk')
t.match(req.headers, { 'content-type': 'application/x-ndjson' })
@ -667,7 +654,7 @@ test('bulk index', t => {
})
.catch(err => {
t.true(err instanceof errors.ConfigurationError)
t.is(err.message, `Bulk helper invalid action: 'foo'`)
t.is(err.message, 'Bulk helper invalid action: \'foo\'')
})
})

View File

@ -29,7 +29,7 @@ if (clientVersion.includes('-')) {
const nodeVersion = process.versions.node
test('Scroll search', async t => {
var count = 0
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.match(params.headers, {
@ -52,10 +52,10 @@ test('Scroll search', async t => {
hits: count === 3
? []
: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}
}
@ -79,7 +79,7 @@ test('Scroll search', async t => {
})
test('Clear a scroll search', async t => {
var count = 0
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
t.notMatch(params.headers, {
@ -129,7 +129,7 @@ test('Clear a scroll search', async t => {
})
test('Scroll search (retry)', async t => {
var count = 0
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
count += 1
@ -149,10 +149,10 @@ test('Scroll search (retry)', async t => {
hits: count === 4
? []
: [
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
{ _source: { one: 'one' } },
{ _source: { two: 'two' } },
{ _source: { three: 'three' } }
]
}
}
}
@ -181,7 +181,7 @@ test('Scroll search (retry)', async t => {
test('Scroll search (retry throws and maxRetries)', async t => {
const maxRetries = 5
const expectedAttempts = maxRetries + 1
var count = 0
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
count += 1
@ -217,7 +217,7 @@ test('Scroll search (retry throws and maxRetries)', async t => {
test('Scroll search (retry throws later)', async t => {
const maxRetries = 5
const expectedAttempts = maxRetries + 2
var count = 0
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
count += 1
@ -268,7 +268,7 @@ test('Scroll search (retry throws later)', async t => {
})
test('Scroll search documents', async t => {
var count = 0
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
if (count === 0) {
@ -287,10 +287,10 @@ test('Scroll search documents', async t => {
hits: count === 3
? []
: [
{ _source: { val: 1 * count } },
{ _source: { val: 2 * count } },
{ _source: { val: 3 * count } }
]
{ _source: { val: 1 * count } },
{ _source: { val: 2 * count } },
{ _source: { val: 3 * count } }
]
}
}
}
@ -321,7 +321,7 @@ test('Scroll search documents', async t => {
test('Should not retry if maxRetries = 0', async t => {
const maxRetries = 0
const expectedAttempts = 1
var count = 0
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
count += 1
@ -355,7 +355,7 @@ test('Should not retry if maxRetries = 0', async t => {
})
test('Fix querystring for scroll search', async t => {
var count = 0
let count = 0
const MockConnection = connection.buildMockConnection({
onRequest (params) {
if (count === 0) {
@ -372,8 +372,8 @@ test('Fix querystring for scroll search', async t => {
hits: count === 3
? []
: [
{ _source: { val: count } }
]
{ _source: { val: count } }
]
}
}
}

View File

@ -27,7 +27,7 @@ test('RoundRobinSelector', t => {
const arr = [0, 1, 2, 3, 4, 5]
t.plan(arr.length + 1)
for (var i = 0; i <= arr.length; i++) {
for (let i = 0; i <= arr.length; i++) {
t.strictEqual(
selector(arr),
i === arr.length ? arr[0] : arr[i]

View File

@ -166,7 +166,7 @@ test('Send POST', t => {
'content-type': 'application/json',
'content-length': '17'
})
var json = ''
let json = ''
req.setEncoding('utf8')
req.on('data', chunk => { json += chunk })
req.on('error', err => t.fail(err))
@ -217,7 +217,7 @@ test('Send POST (ndjson)', t => {
'content-type': 'application/x-ndjson',
'content-length': '67'
})
var json = ''
let json = ''
req.setEncoding('utf8')
req.on('data', chunk => { json += chunk })
req.on('error', err => t.fail(err))
@ -265,7 +265,7 @@ test('Send stream', t => {
t.match(req.headers, {
'content-type': 'application/json'
})
var json = ''
let json = ''
req.setEncoding('utf8')
req.on('data', chunk => { json += chunk })
req.on('error', err => t.fail(err))
@ -308,7 +308,7 @@ test('Send stream (bulkBody)', t => {
t.match(req.headers, {
'content-type': 'application/x-ndjson'
})
var json = ''
let json = ''
req.setEncoding('utf8')
req.on('data', chunk => { json += chunk })
req.on('error', err => t.fail(err))
@ -587,7 +587,7 @@ test('ConnectionError (should call markDead on the failing connection)', t => {
test('Retry mechanism', t => {
t.plan(2)
var count = 0
let count = 0
function handler (req, res) {
res.setHeader('Content-Type', 'application/json;utf=8')
if (count > 0) {
@ -635,7 +635,7 @@ test('Retry mechanism', t => {
test('Should not retry if the body is a stream', t => {
t.plan(2)
var count = 0
let count = 0
function handler (req, res) {
count++
res.setHeader('Content-Type', 'application/json;utf=8')
@ -680,7 +680,7 @@ test('Should not retry if the body is a stream', t => {
test('Should not retry if the bulkBody is a stream', t => {
t.plan(2)
var count = 0
let count = 0
function handler (req, res) {
count++
res.setHeader('Content-Type', 'application/json;utf=8')
@ -725,7 +725,7 @@ test('Should not retry if the bulkBody is a stream', t => {
test('No retry', t => {
t.plan(2)
var count = 0
let count = 0
function handler (req, res) {
count++
res.setHeader('Content-Type', 'application/json;utf=8')
@ -772,7 +772,7 @@ test('No retry', t => {
test('Custom retry mechanism', t => {
t.plan(2)
var count = 0
let count = 0
function handler (req, res) {
res.setHeader('Content-Type', 'application/json;utf=8')
if (count > 0) {
@ -822,7 +822,7 @@ test('Custom retry mechanism', t => {
test('Should not retry on 429', t => {
t.plan(3)
var count = 0
let count = 0
function handler (req, res) {
t.strictEqual(count++, 0)
res.statusCode = 429
@ -988,7 +988,7 @@ test('Retry mechanism and abort', t => {
id: 'node3'
}])
var count = 0
let count = 0
const transport = new Transport({
emit: event => {
if (event === 'request' && count++ > 0) {
@ -1294,7 +1294,7 @@ test('Should retry the request if the statusCode is 502/3/4', t => {
t.test(statusCode, t => {
t.plan(3)
var first = true
let first = true
function handler (req, res) {
if (first) {
first = false
@ -1886,7 +1886,7 @@ test('asStream set to true', t => {
'content-type': 'application/json;utf=8'
})
var payload = ''
let payload = ''
body.setEncoding('utf8')
body.on('data', chunk => { payload += chunk })
body.on('error', err => t.fail(err))
@ -1906,7 +1906,7 @@ test('Compress request', t => {
'content-type': 'application/json',
'content-encoding': 'gzip'
})
var json = ''
let json = ''
req
.pipe(createGunzip())
.on('data', chunk => { json += chunk })
@ -1953,7 +1953,7 @@ test('Compress request', t => {
'content-type': 'application/json',
'content-encoding': 'gzip'
})
var json = ''
let json = ''
req
.pipe(createGunzip())
.on('data', chunk => { json += chunk })
@ -1999,7 +1999,7 @@ test('Compress request', t => {
'content-type': 'application/json',
'content-encoding': 'gzip'
})
var json = ''
let json = ''
req
.pipe(createGunzip())
.on('data', chunk => { json += chunk })
@ -2112,13 +2112,13 @@ test('Compress request', t => {
t.test('Retry a gzipped body', t => {
t.plan(7)
var count = 0
let count = 0
function handler (req, res) {
t.match(req.headers, {
'content-type': 'application/json',
'content-encoding': 'gzip'
})
var json = ''
let json = ''
req
.pipe(createGunzip())
.on('data', chunk => { json += chunk })

View File

@ -30,7 +30,7 @@ const intoStream = require('into-stream')
class MockConnection extends Connection {
request (params, callback) {
var aborted = false
let aborted = false
const stream = intoStream(JSON.stringify({ hello: 'world' }))
stream.statusCode = setStatusCode(params.path)
stream.headers = {
@ -54,7 +54,7 @@ class MockConnection extends Connection {
class MockConnectionTimeout extends Connection {
request (params, callback) {
var aborted = false
let aborted = false
process.nextTick(() => {
if (!aborted) {
callback(new TimeoutError('Request timed out', params), null)
@ -70,7 +70,7 @@ class MockConnectionTimeout extends Connection {
class MockConnectionError extends Connection {
request (params, callback) {
var aborted = false
let aborted = false
process.nextTick(() => {
if (!aborted) {
callback(new ConnectionError('Kaboom'), null)
@ -86,7 +86,7 @@ class MockConnectionError extends Connection {
class MockConnectionSniff extends Connection {
request (params, callback) {
var aborted = false
let aborted = false
const sniffResult = {
nodes: {
'node-1': {
@ -133,11 +133,11 @@ function buildMockConnection (opts) {
class MockConnection extends Connection {
request (params, callback) {
var { body, statusCode } = opts.onRequest(params)
let { body, statusCode } = opts.onRequest(params)
if (typeof body !== 'string') {
body = JSON.stringify(body)
}
var aborted = false
let aborted = false
const stream = intoStream(body)
stream.statusCode = statusCode || 200
stream.headers = {

View File

@ -23,7 +23,7 @@ const debug = require('debug')('elasticsearch-test')
const workq = require('workq')
const buildServer = require('./buildServer')
var id = 0
let id = 0
function buildCluster (options, callback) {
const clusterId = id++
debug(`Booting cluster '${clusterId}'`)
@ -37,7 +37,7 @@ function buildCluster (options, callback) {
const sniffResult = { nodes: {} }
options.numberOfNodes = options.numberOfNodes || 4
for (var i = 0; i < options.numberOfNodes; i++) {
for (let i = 0; i < options.numberOfNodes; i++) {
q.add(bootNode, { id: `node${i}` })
}

View File

@ -35,7 +35,7 @@ const secureOpts = {
cert: readFileSync(join(__dirname, '..', 'fixtures', 'https.cert'), 'utf8')
}
var id = 0
let id = 0
function buildServer (handler, opts, cb) {
const serverId = id++
debug(`Booting server '${serverId}'`)