From 28370acf49195b9e1ad0ada86a69f1514f6efad0 Mon Sep 17 00:00:00 2001 From: Tomas Della Vedova Date: Fri, 19 Feb 2021 08:27:20 +0100 Subject: [PATCH] Remove Node.js v8 support (#1402) --- .ci/test-matrix.yml | 1 - .github/workflows/nodejs.yml | 25 ------------- README.md | 2 +- api/utils.js | 8 ++--- docs/examples/asStream.asciidoc | 6 ++-- docs/examples/scroll.asciidoc | 4 +-- docs/examples/sql.query.asciidoc | 2 +- docs/installation.asciidoc | 2 +- index.js | 31 +++++----------- lib/Connection.js | 4 +-- lib/Serializer.js | 14 ++++---- lib/Transport.js | 13 +++---- lib/pool/BaseConnectionPool.js | 6 ++-- lib/pool/ConnectionPool.js | 9 +++-- package.json | 48 +++++++++++++------------ scripts/utils/clone-es.js | 4 +-- scripts/utils/generateApis.js | 24 ++++++------- scripts/utils/generateDocs.js | 8 ++--- scripts/utils/generateMain.js | 36 +++++++++---------- scripts/utils/generateRequestTypes.js | 9 ++--- test/acceptance/observability.test.js | 2 +- test/acceptance/resurrect.test.js | 4 +-- test/acceptance/sniff.test.js | 4 +-- test/benchmarks/macro/complex.bench.js | 10 +++--- test/benchmarks/macro/simple.bench.js | 18 +++++----- test/benchmarks/micro/basic.bench.js | 6 ++-- test/benchmarks/suite.js | 12 +++---- test/integration/helpers/scroll.test.js | 6 ++-- test/integration/index.js | 27 +++++++------- test/integration/test-runner.js | 15 ++++---- test/unit/base-connection-pool.test.js | 6 ++-- test/unit/child.test.js | 12 +++---- test/unit/client.test.js | 8 ++--- test/unit/connection-pool.test.js | 14 ++++---- test/unit/connection.test.js | 20 +++++------ test/unit/events.test.js | 3 +- test/unit/helpers/bulk.test.js | 13 ------- test/unit/helpers/scroll.test.js | 44 +++++++++++------------ test/unit/selectors.test.js | 2 +- test/unit/transport.test.js | 36 +++++++++---------- test/utils/MockConnection.js | 12 +++---- test/utils/buildCluster.js | 4 +-- test/utils/buildServer.js | 2 +- 43 files changed, 246 insertions(+), 290 deletions(-) diff --git a/.ci/test-matrix.yml b/.ci/test-matrix.yml index aff595c22..a10fea1d3 100644 --- a/.ci/test-matrix.yml +++ b/.ci/test-matrix.yml @@ -6,7 +6,6 @@ NODE_JS_VERSION: - 14 - 12 - 10 - - 8 TEST_SUITE: - free diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml index f0c6ab7a5..73f75a386 100644 --- a/.github/workflows/nodejs.yml +++ b/.github/workflows/nodejs.yml @@ -40,31 +40,6 @@ jobs: run: | npm run test:types - test-node-v8: - name: Test - runs-on: ${{ matrix.os }} - - strategy: - matrix: - node-version: [8.x] - os: [ubuntu-latest, windows-latest, macOS-latest] - - steps: - - uses: actions/checkout@v2 - - - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - - name: Install - run: | - npm install - - - name: Test - run: | - npm run test:node8 - helpers-integration-test: name: Helpers integration test runs-on: ubuntu-latest diff --git a/README.md b/README.md index 1e6c41716..a98900bf1 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ npm install @elastic/elasticsearch ### Node.js support -NOTE: The minimum supported version of Node.js is `v8`. +NOTE: The minimum supported version of Node.js is `v10`. 
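As a point of reference, the sketch below shows the kind of runtime check that enforces such a version floor: it reads the major version of the running Node.js process and emits a warning when it is below v10. It is a minimal standalone illustration in the spirit of the warning logic kept in `index.js` later in this patch, not code taken from the client; the warning name and message are placeholders.

[source,js]
----
'use strict'

// Standalone sketch: read the major version of the running Node.js process
// and warn when it is below the supported floor. The warning name and
// message are placeholders, not the client's actual output.
const nodeMajor = Number(process.versions.node.split('.')[0])

if (nodeMajor < 10) {
  // process.emitWarning(message, type) is a standard Node.js API.
  process.emitWarning(
    'Node.js v10 or newer is required by this library; please upgrade your runtime.',
    'UnsupportedNodeWarning'
  )
}
----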
The client versioning follows the Elastc Stack versioning, this means that major, minor, and patch releases are done following a precise schedule that diff --git a/api/utils.js b/api/utils.js index 1982d68da..bf18fc9fe 100644 --- a/api/utils.js +++ b/api/utils.js @@ -31,10 +31,10 @@ function handleError (err, callback) { } function snakeCaseKeys (acceptedQuerystring, snakeCase, querystring) { - var target = {} - var keys = Object.keys(querystring) - for (var i = 0, len = keys.length; i < len; i++) { - var key = keys[i] + const target = {} + const keys = Object.keys(querystring) + for (let i = 0, len = keys.length; i < len; i++) { + const key = keys[i] target[snakeCase[key] || key] = querystring[key] } return target diff --git a/docs/examples/asStream.asciidoc b/docs/examples/asStream.asciidoc index df66744e0..90975a262 100644 --- a/docs/examples/asStream.asciidoc +++ b/docs/examples/asStream.asciidoc @@ -57,7 +57,7 @@ async function run () { }) // stream async iteration, available in Node.js ≥ 10 - var payload = '' + let payload = '' body.setEncoding('utf8') for await (const chunk of body) { payload += chunk @@ -65,7 +65,7 @@ async function run () { console.log(JSON.parse(payload)) // classic stream callback style - var payload = '' + let payload = '' body.setEncoding('utf8') body.on('data', chunk => { payload += chunk }) body.on('error', console.log) @@ -101,4 +101,4 @@ fastify.post('/search/:index', async (req, reply) => { }) fastify.listen(3000) ----- \ No newline at end of file +---- diff --git a/docs/examples/scroll.asciidoc b/docs/examples/scroll.asciidoc index dd8ebefd3..a1373af71 100644 --- a/docs/examples/scroll.asciidoc +++ b/docs/examples/scroll.asciidoc @@ -124,7 +124,7 @@ const client = new Client({ node: 'http://localhost:9200' }) // Scroll utility async function * scrollSearch (params) { - var response = await client.search(params) + let response = await client.search(params) while (true) { const sourceHits = response.body.hits.hits @@ -190,4 +190,4 @@ async function run () { } run().catch(console.log) ----- \ No newline at end of file +---- diff --git a/docs/examples/sql.query.asciidoc b/docs/examples/sql.query.asciidoc index e6bee5f4f..651cb3c3d 100644 --- a/docs/examples/sql.query.asciidoc +++ b/docs/examples/sql.query.asciidoc @@ -51,7 +51,7 @@ async function run () { const data = body.rows.map(row => { const obj = {} - for (var i = 0; i < row.length; i++) { + for (let i = 0; i < row.length; i++) { obj[body.columns[i].name] = row[i] } return obj diff --git a/docs/installation.asciidoc b/docs/installation.asciidoc index a8a9f292b..83628a3d6 100644 --- a/docs/installation.asciidoc +++ b/docs/installation.asciidoc @@ -24,7 +24,7 @@ To learn more about the supported major versions, please refer to the [[nodejs-support]] === Node.js support -NOTE: The minimum supported version of Node.js is `v8`. +NOTE: The minimum supported version of Node.js is `v10`. The client versioning follows the {stack} versioning, this means that major, minor, and patch releases are done following a precise schedule that diff --git a/index.js b/index.js index b68f892cc..76b517a1b 100644 --- a/index.js +++ b/index.js @@ -27,8 +27,7 @@ const debug = require('debug')('elasticsearch') const Transport = require('./lib/Transport') const Connection = require('./lib/Connection') const { ConnectionPool, CloudConnectionPool } = require('./lib/pool') -// Helpers works only in Node.js >= 10 -const Helpers = nodeMajor < 10 ? 
/* istanbul ignore next */ null : require('./lib/Helpers') +const Helpers = require('./lib/Helpers') const Serializer = require('./lib/Serializer') const errors = require('./lib/errors') const { ConfigurationError } = errors @@ -48,15 +47,6 @@ const kEventEmitter = Symbol('elasticsearchjs-event-emitter') const ESAPI = require('./api') -/* istanbul ignore next */ -if (nodeMajor < 10) { - process.emitWarning('You are using a version of Node.js that is currently in EOL. ' + - 'The support for this version will be dropped in 7.12. ' + - 'Please refer to https://ela.st/nodejs-support for additional information.', - 'DeprecationWarning' - ) -} - /* istanbul ignore next */ if (nodeMajor >= 10 && nodeMajor < 12) { process.emitWarning('You are using a version of Node.js that will reach EOL in April 2021. ' + @@ -189,16 +179,13 @@ class Client extends ESAPI { context: options.context }) - /* istanbul ignore else */ - if (Helpers !== null) { - this.helpers = new Helpers({ - client: this, - maxRetries: options.maxRetries, - metaHeader: options.enableMetaHeader - ? `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}` - : null - }) - } + this.helpers = new Helpers({ + client: this, + maxRetries: options.maxRetries, + metaHeader: options.enableMetaHeader + ? `es=${clientVersion},js=${nodeVersion},t=${clientVersion},hc=${nodeVersion}` + : null + }) } get emit () { @@ -223,7 +210,7 @@ class Client extends ESAPI { opts = {} } - var [namespace, method] = name.split('.') + let [namespace, method] = name.split('.') if (method == null) { method = namespace namespace = null diff --git a/lib/Connection.js b/lib/Connection.js index 011644380..cb5c7c7f7 100644 --- a/lib/Connection.js +++ b/lib/Connection.js @@ -213,8 +213,8 @@ class Connection { } const paramsKeys = Object.keys(params) - for (var i = 0, len = paramsKeys.length; i < len; i++) { - var key = paramsKeys[i] + for (let i = 0, len = paramsKeys.length; i < len; i++) { + const key = paramsKeys[i] if (key === 'path') { request.pathname = resolve(request.pathname, params[key]) } else if (key === 'querystring' && !!params[key] === true) { diff --git a/lib/Serializer.js b/lib/Serializer.js index c2cd41b68..8aa3d9e7c 100644 --- a/lib/Serializer.js +++ b/lib/Serializer.js @@ -27,8 +27,9 @@ const { SerializationError, DeserializationError } = require('./errors') class Serializer { serialize (object) { debug('Serializing', object) + let json try { - var json = JSON.stringify(object) + json = JSON.stringify(object) } catch (err) { throw new SerializationError(err.message, object) } @@ -37,8 +38,9 @@ class Serializer { deserialize (json) { debug('Deserializing', json) + let object try { - var object = sjson.parse(json) + object = sjson.parse(json) } catch (err) { throw new DeserializationError(err.message, json) } @@ -50,8 +52,8 @@ class Serializer { if (Array.isArray(array) === false) { throw new SerializationError('The argument provided is not an array') } - var ndjson = '' - for (var i = 0, len = array.length; i < len; i++) { + let ndjson = '' + for (let i = 0, len = array.length; i < len; i++) { if (typeof array[i] === 'string') { ndjson += array[i] + '\n' } else { @@ -67,8 +69,8 @@ class Serializer { if (typeof object === 'string') return object // arrays should be serialized as comma separated list const keys = Object.keys(object) - for (var i = 0, len = keys.length; i < len; i++) { - var key = keys[i] + for (let i = 0, len = keys.length; i < len; i++) { + const key = keys[i] // elasticsearch will complain for keys without a value if 
(object[key] === undefined) { delete object[key] diff --git a/lib/Transport.js b/lib/Transport.js index 46c1e3986..a61ce0d3a 100644 --- a/lib/Transport.js +++ b/lib/Transport.js @@ -91,7 +91,7 @@ class Transport { callback = options options = {} } - var p = null + let p = null // promises support if (callback === undefined) { @@ -147,9 +147,10 @@ class Transport { // the size of the stream, we risk to take too much memory. // Furthermore, copying everytime the stream is very a expensive operation. const maxRetries = isStream(params.body) || isStream(params.bulkBody) - ? 0 : (typeof options.maxRetries === 'number' ? options.maxRetries : this.maxRetries) + ? 0 + : (typeof options.maxRetries === 'number' ? options.maxRetries : this.maxRetries) const compression = options.compression !== undefined ? options.compression : this.compression - var request = { abort: noop } + let request = { abort: noop } const transportReturn = { then (onFulfilled, onRejected) { return p.then(onFulfilled, onRejected) @@ -525,7 +526,7 @@ function defaultNodeFilter (node) { } function roundRobinSelector () { - var current = -1 + let current = -1 return function _roundRobinSelector (connections) { if (++current >= connections.length) { current = 0 @@ -540,8 +541,8 @@ function randomSelector (connections) { } function generateRequestId () { - var maxInt = 2147483647 - var nextReqId = 0 + const maxInt = 2147483647 + let nextReqId = 0 return function genReqId (params, options) { return (nextReqId = (nextReqId + 1) & maxInt) } diff --git a/lib/pool/BaseConnectionPool.js b/lib/pool/BaseConnectionPool.js index 8975ea2be..2b3081153 100644 --- a/lib/pool/BaseConnectionPool.js +++ b/lib/pool/BaseConnectionPool.js @@ -128,7 +128,7 @@ class BaseConnectionPool { */ empty (callback) { debug('Emptying the connection pool') - var openConnections = this.size + let openConnections = this.size this.connections.forEach(connection => { connection.close(() => { if (--openConnections === 0) { @@ -201,7 +201,7 @@ class BaseConnectionPool { const ids = Object.keys(nodes) const hosts = [] - for (var i = 0, len = ids.length; i < len; i++) { + for (let i = 0, len = ids.length; i < len; i++) { const node = nodes[ids[i]] // If there is no protocol in // the `publish_address` new URL will throw @@ -210,7 +210,7 @@ class BaseConnectionPool { // - hostname/ip:port // if we encounter the second case, we should // use the hostname instead of the ip - var address = node.http.publish_address + let address = node.http.publish_address const parts = address.split('/') // the url is in the form of hostname/ip:port if (parts.length > 1) { diff --git a/lib/pool/ConnectionPool.js b/lib/pool/ConnectionPool.js index 5a6c09a6b..6fd6fc7db 100644 --- a/lib/pool/ConnectionPool.js +++ b/lib/pool/ConnectionPool.js @@ -80,7 +80,7 @@ class ConnectionPool extends BaseConnectionPool { // list a node that no longer exist. The following check verify // that the connection is still part of the pool before // marking it as dead. - for (var i = 0; i < this.size; i++) { + for (let i = 0; i < this.size; i++) { if (this.connections[i].id === id) { this.dead.push(id) break @@ -138,7 +138,7 @@ class ConnectionPool extends BaseConnectionPool { path: '/', timeout: this.pingTimeout }, (err, response) => { - var isAlive = true + let isAlive = true const statusCode = response !== null ? 
response.statusCode : 0 if (err != null || (statusCode === 502 || statusCode === 503 || statusCode === 504)) { @@ -170,8 +170,7 @@ class ConnectionPool extends BaseConnectionPool { isAlive: true, connection }) - // eslint-disable-next-line standard/no-callback-literal - callback(true, connection) + callback(true, connection) // eslint-disable-line } } @@ -199,7 +198,7 @@ class ConnectionPool extends BaseConnectionPool { // TODO: can we cache this? const connections = [] - for (var i = 0; i < this.size; i++) { + for (let i = 0; i < this.size; i++) { const connection = this.connections[i] if (noAliveConnections || connection.status === Connection.statuses.ALIVE) { if (filter(connection) === true) { diff --git a/package.json b/package.json index 676fcfd62..dbca618ca 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,6 @@ ], "scripts": { "test": "npm run lint && tap test/{unit,acceptance}/{*,**/*}.test.js && npm run test:types", - "test:node8": "npm run lint && tap test/{unit,acceptance}/*.test.js", "test:unit": "tap test/unit/{*,**/*}.test.js", "test:acceptance": "tap test/acceptance/*.test.js", "test:integration": "node test/integration/index.js", @@ -49,37 +48,37 @@ }, "devDependencies": { "@sinonjs/fake-timers": "github:sinonjs/fake-timers#0bfffc1", - "@types/node": "^12.6.2", + "@types/node": "^14.14.28", "convert-hrtime": "^3.0.0", "dedent": "^0.7.0", - "deepmerge": "^4.0.0", + "deepmerge": "^4.2.2", "dezalgo": "^1.0.3", - "fast-deep-equal": "^3.1.1", - "into-stream": "^5.1.1", - "js-yaml": "^3.13.1", + "fast-deep-equal": "^3.1.3", + "into-stream": "^6.0.0", + "js-yaml": "^4.0.0", "license-checker": "^25.0.1", - "minimist": "^1.2.0", - "ora": "^3.4.0", + "minimist": "^1.2.5", + "ora": "^5.3.0", "pretty-hrtime": "^1.0.3", "proxy": "^1.0.2", - "rimraf": "^2.6.3", - "semver": "^6.0.0", - "simple-git": "^1.110.0", - "simple-statistics": "^7.0.2", - "split2": "^3.1.1", - "standard": "^13.0.2", + "rimraf": "^3.0.2", + "semver": "^7.3.4", + "simple-git": "^2.35.0", + "simple-statistics": "^7.4.1", + "split2": "^3.2.2", + "standard": "^16.0.3", "stoppable": "^1.1.0", - "tap": "^14.4.1", - "tsd": "^0.13.1", - "workq": "^2.1.0", - "xmlbuilder2": "^2.1.2" + "tap": "^14.11.0", + "tsd": "^0.14.0", + "workq": "^3.0.0", + "xmlbuilder2": "^2.4.0" }, "dependencies": { - "debug": "^4.1.1", + "debug": "^4.3.1", "hpagent": "^0.1.1", - "ms": "^2.1.1", + "ms": "^2.1.3", "pump": "^3.0.0", - "secure-json-parse": "^2.1.0" + "secure-json-parse": "^2.3.1" }, "license": "Apache-2.0", "repository": { @@ -90,11 +89,16 @@ "url": "https://github.com/elastic/elasticsearch-js/issues" }, "engines": { - "node": ">=8" + "node": ">=10" }, "tsd": { "directory": "test/types" }, + "standard": { + "ignore": [ + "/api" + ] + }, "tap": { "esm": false, "ts": false, diff --git a/scripts/utils/clone-es.js b/scripts/utils/clone-es.js index 05608eeaa..09f078918 100644 --- a/scripts/utils/clone-es.js +++ b/scripts/utils/clone-es.js @@ -42,8 +42,8 @@ function cloneAndCheckout (opts, callback) { * @param {function} callback */ function withTag (tag, callback) { - var fresh = false - var retry = 0 + let fresh = false + let retry = 0 if (!pathExist(esFolder)) { if (!createFolder(esFolder)) { diff --git a/scripts/utils/generateApis.js b/scripts/utils/generateApis.js index 52b665fb9..925fd2b84 100644 --- a/scripts/utils/generateApis.js +++ b/scripts/utils/generateApis.js @@ -208,7 +208,7 @@ function generateSingleApi (version, spec, common) { // get the required parts from the url // if the url has at least one static path, // then 
there are not required parts of the url - var allParts = [] + let allParts = [] for (const path of paths) { if (path.parts) { allParts.push(Object.keys(path.parts)) @@ -252,10 +252,10 @@ function generateSingleApi (version, spec, common) { ${genUrlValidation(paths, api)} - var { ${genQueryBlacklist(false)}, ...querystring } = params + let { ${genQueryBlacklist(false)}, ...querystring } = params querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring) - var path = '' + let path = '' ${buildPath(api)} // build request object @@ -291,7 +291,7 @@ function generateSingleApi (version, spec, common) { return code.join('\n ') function _genRequiredCheck (param) { - var camelCased = param[0] === '_' + const camelCased = param[0] === '_' ? '_' + param.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase()) : param.replace(/_([a-z])/g, k => k[1].toUpperCase()) @@ -388,7 +388,7 @@ function generateSingleApi (version, spec, common) { return path.length > 0 ? ('\'/\' + ' + path) : '\'/\'' } - var hasStaticPath = false + let hasStaticPath = false const sortedPaths = paths // some legacy API have mutliple statis paths // this filter removes them @@ -403,8 +403,8 @@ function generateSingleApi (version, spec, common) { // sort by number of parameters (desc) .sort((a, b) => Object.keys(b.parts || {}).length - Object.keys(a.parts || {}).length) - var code = '' - for (var i = 0; i < sortedPaths.length; i++) { + let code = '' + for (let i = 0; i < sortedPaths.length; i++) { const { path, methods } = sortedPaths[i] if (sortedPaths.length === 1) { code += `if (method == null) method = ${generatePickMethod(methods)} @@ -492,13 +492,13 @@ function genUrlValidation (paths, api) { .map(s => s.slice(1, -1)) .reverse() - var code = '' + let code = '' const len = chunks.length chunks.forEach((chunk, index) => { if (index === len - 1) return - var params = [] - var camelCased = chunk[0] === '_' + const params = [] + let camelCased = chunk[0] === '_' ? '_' + chunk.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase()) : chunk.replace(/_([a-z])/g, k => k[1].toUpperCase()) @@ -507,7 +507,7 @@ function genUrlValidation (paths, api) { } else { code += `${index ? '} else ' : ''}if ((params['${chunk}'] != null || params['${camelCased}'] != null) && (` } - for (var i = index + 1; i < len; i++) { + for (let i = index + 1; i < len; i++) { params.push(chunks[i]) // url parts can be declared in camelCase fashion camelCased = chunks[i][0] === '_' @@ -543,7 +543,7 @@ function generateDocumentation ({ documentation }, op) { if (documentation == null) return '' - var doc = '/**\n' + let doc = '/**\n' doc += ` * Perform a ${op} request\n` if (documentation.description) { doc += ` * ${documentation.description.replace(/\u00A0/g, ' ')}\n` diff --git a/scripts/utils/generateDocs.js b/scripts/utils/generateDocs.js index b7ee9bcaf..9f681ab49 100644 --- a/scripts/utils/generateDocs.js +++ b/scripts/utils/generateDocs.js @@ -28,7 +28,7 @@ const codeExamples = readdirSync(join(__dirname, '..', '..', 'docs', 'examples') .filter(api => api !== 'index') function generateDocs (common, spec) { - var doc = dedent` + let doc = dedent` [[api-reference]] //////// @@ -110,7 +110,7 @@ function generateDocs (common, spec) { } function commonParameters (spec) { - var doc = dedent` + let doc = dedent` [discrete] === Common parameters Parameters that are accepted by all API endpoints. 
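A large share of the remaining hunks simply replace `var` declarations with `let` or `const`. The behaviour difference is easiest to see with a loop whose counter is captured in closures; the snippet below is a standalone illustration of that block-scoping difference and is not taken from the client code base.

[source,js]
----
'use strict'

// `var` creates a single function-scoped binding, so every callback pushed
// below ends up reading the final value of the shared counter.
const withVar = []
for (var i = 0; i < 3; i++) {
  withVar.push(() => i)
}
console.log(withVar.map(fn => fn())) // [ 3, 3, 3 ]

// `let` creates a fresh binding per iteration, so each callback remembers
// the value the counter had when the callback was created.
const withLet = []
for (let j = 0; j < 3; j++) {
  withLet.push(() => j)
}
console.log(withLet.map(fn => fn())) // [ 0, 1, 2 ]
----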
@@ -196,7 +196,7 @@ function generateApiDoc (spec) { const codeParameters = params .reduce((acc, val) => { - var code = `${val.name}: ${val.type},` + const code = `${val.name}: ${val.type},` acc += acc === '' ? code : '\n ' + code @@ -210,7 +210,7 @@ function generateApiDoc (spec) { ? '' : `*Stability:* ${spec[name].stability}` - var doc = dedent` + let doc = dedent` [discrete] === ${camelify(name)} ${stability} diff --git a/scripts/utils/generateMain.js b/scripts/utils/generateMain.js index cfdd2fe3a..c615d6476 100644 --- a/scripts/utils/generateMain.js +++ b/scripts/utils/generateMain.js @@ -235,44 +235,44 @@ function buildMethodDefinition (opts, api, name, hasBody, isHead) { if (opts.kibana) { if (hasBody) { return [ - { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise>` } + { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise>' } ] } else { return [ - { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise>` } + { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise>' } ] } } if (hasBody) { let methods = [ - { key: `${api}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise>` }, - { key: `${api}(callback: callbackFn)`, val: `TransportRequestCallback` }, - { key: `${api}(params: RequestParams.${Name}, callback: callbackFn)`, val: `TransportRequestCallback` }, - { key: `${api}(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn)`, val: `TransportRequestCallback` } + { key: `${api}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise>' }, + { key: `${api}(callback: callbackFn)`, val: 'TransportRequestCallback' }, + { key: `${api}(params: RequestParams.${Name}, callback: callbackFn)`, val: 'TransportRequestCallback' }, + { key: `${api}(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn)`, val: 'TransportRequestCallback' } ] if (isSnakeCased(api)) { methods = methods.concat([ - { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise>` }, - { key: `${camelify(api)}(callback: callbackFn)`, val: `TransportRequestCallback` }, - { key: `${camelify(api)}(params: RequestParams.${Name}, callback: callbackFn)`, val: `TransportRequestCallback` }, - { key: `${camelify(api)}(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn)`, val: `TransportRequestCallback` } + { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise>' }, + { key: `${camelify(api)}(callback: callbackFn)`, val: 'TransportRequestCallback' }, + { key: `${camelify(api)}(params: RequestParams.${Name}, callback: callbackFn)`, val: 'TransportRequestCallback' }, + { key: `${camelify(api)}(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn)`, val: 'TransportRequestCallback' } ]) } return methods } else { let methods = [ - { key: `${api}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise>` }, - { key: `${api}(callback: callbackFn)`, val: `TransportRequestCallback` }, - { key: `${api}(params: RequestParams.${Name}, callback: callbackFn)`, val: 
`TransportRequestCallback` }, - { key: `${api}(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn)`, val: `TransportRequestCallback` } + { key: `${api}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise>' }, + { key: `${api}(callback: callbackFn)`, val: 'TransportRequestCallback' }, + { key: `${api}(params: RequestParams.${Name}, callback: callbackFn)`, val: 'TransportRequestCallback' }, + { key: `${api}(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn)`, val: 'TransportRequestCallback' } ] if (isSnakeCased(api)) { methods = methods.concat([ - { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: `TransportRequestPromise>` }, - { key: `${camelify(api)}(callback: callbackFn)`, val: `TransportRequestCallback` }, - { key: `${camelify(api)}(params: RequestParams.${Name}, callback: callbackFn)`, val: `TransportRequestCallback` }, - { key: `${camelify(api)}(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn)`, val: `TransportRequestCallback` } + { key: `${camelify(api)}(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise>' }, + { key: `${camelify(api)}(callback: callbackFn)`, val: 'TransportRequestCallback' }, + { key: `${camelify(api)}(params: RequestParams.${Name}, callback: callbackFn)`, val: 'TransportRequestCallback' }, + { key: `${camelify(api)}(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn)`, val: 'TransportRequestCallback' } ]) } return methods diff --git a/scripts/utils/generateRequestTypes.js b/scripts/utils/generateRequestTypes.js index 0a5963efd..1b6fcfbcd 100644 --- a/scripts/utils/generateRequestTypes.js +++ b/scripts/utils/generateRequestTypes.js @@ -33,7 +33,7 @@ const ndjsonApiKey = ndjsonApi function generate (version, api) { const release = semver.valid(version) ? semver.major(version) : version - var types = `/* + let types = `/* * Licensed to Elasticsearch B.V. under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright @@ -79,8 +79,8 @@ export interface Generic { // get the required parts from the url // if the url has at least one static path, // then there are not required parts of the url - var allParts = [] - var requiredParts = [] + let allParts = [] + let requiredParts = [] for (const path of paths) { if (path.parts) { allParts.push(Object.keys(path.parts)) @@ -146,7 +146,7 @@ export interface ${toPascalCase(name)}${body ? `` : ''} exte case 'time': case 'timeout': return 'string' - case 'enum': + case 'enum': { // the following code changes 'true' | 'false' to boolean let foundTrue = false let foundFalse = false @@ -172,6 +172,7 @@ export interface ${toPascalCase(name)}${body ? 
`` : ''} exte options.push('boolean') } return options.join(' | ') + } case 'int': case 'double': case 'long': diff --git a/test/acceptance/observability.test.js b/test/acceptance/observability.test.js index f3dae359a..d7141b923 100644 --- a/test/acceptance/observability.test.js +++ b/test/acceptance/observability.test.js @@ -16,7 +16,7 @@ test('Request id', t => { const genReqId = generateRequestId() t.type(genReqId, 'function') - for (var i = 1; i <= 10; i++) { + for (let i = 1; i <= 10; i++) { t.strictEqual(genReqId(), i) } diff --git a/test/acceptance/resurrect.test.js b/test/acceptance/resurrect.test.js index 2c5aaf0b5..d9bfd6112 100644 --- a/test/acceptance/resurrect.test.js +++ b/test/acceptance/resurrect.test.js @@ -95,7 +95,7 @@ test('Resurrect a node and handle 502/3/4 status code', t => { const clock = FakeTimers.install({ toFake: ['Date'] }) const q = workq() - var count = 0 + let count = 0 function handler (req, res) { res.statusCode = count++ < 2 ? 502 : 200 res.setHeader('content-type', 'application/json') @@ -114,7 +114,7 @@ test('Resurrect a node and handle 502/3/4 status code', t => { maxRetries: 0 }) - var idCount = 2 + let idCount = 2 client.on(events.RESURRECT, (err, meta) => { t.error(err) t.strictEqual(meta.strategy, 'ping') diff --git a/test/acceptance/sniff.test.js b/test/acceptance/sniff.test.js index dbd62db88..89008ce21 100644 --- a/test/acceptance/sniff.test.js +++ b/test/acceptance/sniff.test.js @@ -59,7 +59,7 @@ test('Should update the connection pool', t => { t.strictEqual(hosts.length, 4) const ids = Object.keys(nodes) - for (var i = 0; i < hosts.length; i++) { + for (let i = 0; i < hosts.length; i++) { const id = ids[i] // the first node will be an update of the existing one if (id === 'node0') { @@ -118,7 +118,7 @@ test('Should handle hostnames in publish_address', t => { t.error(err) t.strictEqual(hosts.length, 4) - for (var i = 0; i < hosts.length; i++) { + for (let i = 0; i < hosts.length; i++) { // the first node will be an update of the existing one t.strictEqual(hosts[i].url.hostname, 'localhost') } diff --git a/test/benchmarks/macro/complex.bench.js b/test/benchmarks/macro/complex.bench.js index 00cb6c47f..fedc5a1ea 100644 --- a/test/benchmarks/macro/complex.bench.js +++ b/test/benchmarks/macro/complex.bench.js @@ -16,7 +16,7 @@ const { bench, beforeEach, afterEach } = require('../suite')({ } }) -var stackoverflow = [] +let stackoverflow = [] const stackoverflowPath = join( __dirname, 'fixtures', @@ -53,7 +53,7 @@ bench('Bulk index documents', { action: 'bulk' }, async b => { b.start() - for (var i = 0; i < stackoverflow.length; i++) { + for (let i = 0; i < stackoverflow.length; i++) { await b.client.bulk({ body: stackoverflow[i] }) } b.end() @@ -67,7 +67,7 @@ bench('Complex search request', { action: 'search' }, async b => { b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.search({ index: INDEX, body: { @@ -81,8 +81,8 @@ bench('Complex search request', { }) function readSOfile () { - var i = 0 - var stackoverflow = [] + let i = 0 + const stackoverflow = [] return new Promise((resolve, reject) => { createReadStream(stackoverflowPath) .pipe(split(JSON.parse)) diff --git a/test/benchmarks/macro/simple.bench.js b/test/benchmarks/macro/simple.bench.js index f734d1ff2..f8c735bf7 100644 --- a/test/benchmarks/macro/simple.bench.js +++ b/test/benchmarks/macro/simple.bench.js @@ -44,7 +44,7 @@ bench('Ping', { action: 'ping' }, async b => { b.start() - for (var i = 0; i < b.iterations; i++) { + 
for (let i = 0; i < b.iterations; i++) { await b.client.ping() } b.end() @@ -57,7 +57,7 @@ bench('Create index', { action: 'indices.create' }, async b => { b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.indices.create({ index: `test-create-${i}` }) } b.end() @@ -75,7 +75,7 @@ bench('Index small document', { await b.client.indices.create({ index }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.create({ index, type: '_doc', @@ -98,7 +98,7 @@ bench('Index large document', { await b.client.indices.create({ index }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.create({ index, type: '_doc', @@ -128,7 +128,7 @@ bench('Get small document', { }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.get({ index, type: '_doc', @@ -157,7 +157,7 @@ bench('Get large document', { }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.get({ index, type: '_doc', @@ -187,7 +187,7 @@ bench('Search small document', { }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.search({ index, type: '_doc', @@ -221,7 +221,7 @@ bench('Search large document', { }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.search({ index, type: '_doc', @@ -255,7 +255,7 @@ bench('Update small document', { }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await b.client.update({ index, type: '_doc', diff --git a/test/benchmarks/micro/basic.bench.js b/test/benchmarks/micro/basic.bench.js index d829945ca..271a02ff4 100644 --- a/test/benchmarks/micro/basic.bench.js +++ b/test/benchmarks/micro/basic.bench.js @@ -12,7 +12,7 @@ const { connection } = require('../../utils') bench('Initialization', { warmup: 5, measure: 10, iterations: 1000 }, async b => { b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { const client = new Client({ // eslint-disable-line node: 'http://localhost:9200' }) @@ -56,7 +56,7 @@ bench('Basic get', { warmup: 5, measure: 10, iterations: 1000 }, async b => { q: 'foo:bar' }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await client.search({ index: 'test', type: 'doc', @@ -83,7 +83,7 @@ bench('Basic post', { warmup: 5, measure: 10, iterations: 1000 }, async b => { } }) b.start() - for (var i = 0; i < b.iterations; i++) { + for (let i = 0; i < b.iterations; i++) { await client.search({ index: 'test', type: 'doc', diff --git a/test/benchmarks/suite.js b/test/benchmarks/suite.js index 251e58749..8fe575556 100644 --- a/test/benchmarks/suite.js +++ b/test/benchmarks/suite.js @@ -15,10 +15,10 @@ function buildBenchmark (options = {}) { const q = workq() const stats = {} const reports = [] - var beforeEach = null - var afterEach = null - var setup = null - var teardown = null + let beforeEach = null + let afterEach = null + let setup = null + let teardown = null function setBeforeEach (fn) { beforeEach = fn @@ -54,7 +54,7 @@ function buildBenchmark (options = {}) { } stats[title] = [] - var { measure, warmup } = opts + let { measure, warmup } = opts const b = new B({ iterations: opts.iterations }) q.add(runSetup) @@ -225,7 +225,7 @@ function buildBenchmark 
(options = {}) { } }) - for (var i = 0; i < results.length; i++) { + for (let i = 0; i < results.length; i++) { await client.index({ index: 'benchmarking_results', type: '_doc', diff --git a/test/integration/helpers/scroll.test.js b/test/integration/helpers/scroll.test.js index 93594bccf..e7777a4b7 100644 --- a/test/integration/helpers/scroll.test.js +++ b/test/integration/helpers/scroll.test.js @@ -65,7 +65,7 @@ test('search helper', async t => { } }) - var count = 0 + let count = 0 for await (const search of scrollSearch) { count += 1 for (const doc of search.documents) { @@ -87,7 +87,7 @@ test('clear a scroll search', async t => { } }) - var count = 0 + let count = 0 for await (const search of scrollSearch) { count += 1 if (count === 2) { @@ -109,7 +109,7 @@ test('scroll documents', async t => { } }) - var count = 0 + let count = 0 for await (const doc of scrollSearch) { count += 1 t.true(doc.title.toLowerCase().includes('javascript')) diff --git a/test/integration/index.js b/test/integration/index.js index 954bf93d2..01a539813 100644 --- a/test/integration/index.js +++ b/test/integration/index.js @@ -178,8 +178,8 @@ async function start ({ client, isXPack }) { // to provide a better test log output .reduce((arr, file) => { const path = file.slice(file.indexOf('/rest-api-spec/test'), file.lastIndexOf('/')) - var inserted = false - for (var i = 0; i < arr.length; i++) { + let inserted = false + for (let i = 0; i < arr.length; i++) { if (arr[i][0].includes(path)) { inserted = true arr[i].push(file) @@ -222,8 +222,8 @@ async function start ({ client, isXPack }) { .filter(Boolean) // get setup and teardown if present - var setupTest = null - var teardownTest = null + let setupTest = null + let teardownTest = null for (const test of tests) { if (test.setup) setupTest = test.setup if (test.teardown) teardownTest = test.teardown @@ -298,14 +298,15 @@ function log (text) { } function now () { - var ts = process.hrtime() + const ts = process.hrtime() return (ts[0] * 1e3) + (ts[1] / 1e6) } function parse (data) { const schema = yaml.Schema.create(yaml.CORE_SCHEMA, []) + let doc try { - var doc = yaml.safeLoad(data, { schema }) + doc = yaml.safeLoad(data, { schema }) } catch (err) { console.error(err) return @@ -328,8 +329,8 @@ function withSHA (sha) { }) function _withSHA (callback) { - var fresh = false - var retry = 0 + let fresh = false + let retry = 0 if (!pathExist(esFolder)) { if (!createFolder(esFolder)) { @@ -426,10 +427,10 @@ if (require.main === module) { } const shouldSkip = (isXPack, file, name) => { - var list = Object.keys(freeSkips) - for (var i = 0; i < list.length; i++) { + let list = Object.keys(freeSkips) + for (let i = 0; i < list.length; i++) { const freeTest = freeSkips[list[i]] - for (var j = 0; j < freeTest.length; j++) { + for (let j = 0; j < freeTest.length; j++) { if (file.endsWith(list[i]) && (name === freeTest[j] || freeTest[j] === '*')) { const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name log(`Skipping test ${testName} because is blacklisted in the free test`) @@ -440,9 +441,9 @@ const shouldSkip = (isXPack, file, name) => { if (file.includes('x-pack') || isXPack) { list = Object.keys(platinumBlackList) - for (i = 0; i < list.length; i++) { + for (let i = 0; i < list.length; i++) { const platTest = platinumBlackList[list[i]] - for (j = 0; j < platTest.length; j++) { + for (let j = 0; j < platTest.length; j++) { if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) { const testName = 
file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name log(`Skipping test ${testName} because is blacklisted in the platinum test`) diff --git a/test/integration/test-runner.js b/test/integration/test-runner.js index b0e67a285..6b00c04c7 100644 --- a/test/integration/test-runner.js +++ b/test/integration/test-runner.js @@ -320,7 +320,7 @@ function build (opts = {}) { */ function set (key, name) { if (key.includes('_arbitrary_key_')) { - var currentVisit = null + let currentVisit = null for (const path of key.split('.')) { if (path === '_arbitrary_key_') { const keys = Object.keys(currentVisit) @@ -361,8 +361,9 @@ function build (opts = {}) { */ async function doAction (action, stats) { const cmd = parseDo(action) + let api try { - var api = delve(client, cmd.method).bind(client) + api = delve(client, cmd.method).bind(client) } catch (err) { console.error(`\nError: Cannot find the method '${cmd.method}' in the client.\n`) process.exit(1) @@ -373,8 +374,8 @@ function build (opts = {}) { if (cmd.params.ignore) delete cmd.params.ignore const [err, result] = await to(api(cmd.params, options)) - var warnings = result ? result.warnings : null - var body = result ? result.body : null + let warnings = result ? result.warnings : null + const body = result ? result.body : null if (action.warnings && warnings === null) { assert.fail('We should get a warning header', action.warnings) @@ -719,7 +720,7 @@ function parseDo (action) { for (const key in obj) { const val = obj[key] - var newKey = key + let newKey = key if (!~doNotCamelify.indexOf(key)) { // if the key starts with `_` we should not camelify the first occurence // eg: _source_include => _sourceInclude @@ -776,7 +777,7 @@ function parseDoError (err, spec) { function getSkip (arr) { if (!Array.isArray(arr)) return null - for (var i = 0; i < arr.length; i++) { + for (let i = 0; i < arr.length; i++) { if (arr[i].skip) return arr[i].skip } return null @@ -822,7 +823,7 @@ function logSkip (action) { * @returns {boolean} */ function shouldSkip (esVersion, action) { - var shouldSkip = false + let shouldSkip = false // skip based on the version if (action.version) { if (action.version.trim() === 'all') return true diff --git a/test/unit/base-connection-pool.test.js b/test/unit/base-connection-pool.test.js index d313a774b..268f68e70 100644 --- a/test/unit/base-connection-pool.test.js +++ b/test/unit/base-connection-pool.test.js @@ -63,7 +63,7 @@ test('API', t => { t.test('markDead', t => { const pool = new BaseConnectionPool({ Connection, sniffEnabled: true }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + let connection = pool.addConnection(href) t.same(pool.markDead(connection), pool) connection = pool.connections.find(c => c.id === href) t.strictEqual(connection.status, Connection.statuses.ALIVE) @@ -73,7 +73,7 @@ test('API', t => { t.test('markAlive', t => { const pool = new BaseConnectionPool({ Connection, sniffEnabled: true }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + let connection = pool.addConnection(href) t.same(pool.markAlive(connection), pool) connection = pool.connections.find(c => c.id === href) t.strictEqual(connection.status, Connection.statuses.ALIVE) @@ -96,7 +96,7 @@ test('API', t => { t.test('removeConnection', t => { const pool = new BaseConnectionPool({ Connection }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + const connection = pool.addConnection(href) pool.removeConnection(connection) 
t.strictEqual(pool.size, 0) t.end() diff --git a/test/unit/child.test.js b/test/unit/child.test.js index 2f42131f4..1cb3d2b66 100644 --- a/test/unit/child.test.js +++ b/test/unit/child.test.js @@ -29,7 +29,7 @@ const { test('Should create a child client (headers check)', t => { t.plan(4) - var count = 0 + let count = 0 function handler (req, res) { if (count++ === 0) { t.match(req.headers, { 'x-foo': 'bar' }) @@ -216,12 +216,12 @@ test('Should create a child client (generateRequestId check)', t => { t.plan(6) function generateRequestId1 () { - var id = 0 + let id = 0 return () => `trace-1-${id++}` } function generateRequestId2 () { - var id = 0 + let id = 0 return () => `trace-2-${id++}` } @@ -235,7 +235,7 @@ test('Should create a child client (generateRequestId check)', t => { generateRequestId: generateRequestId2() }) - var count = 0 + let count = 0 client.on('request', (err, { meta }) => { t.error(err) t.strictEqual( @@ -266,7 +266,7 @@ test('Should create a child client (name check)', t => { t.strictEqual(client.name, 'parent') t.strictEqual(child.name, 'child') - var count = 0 + let count = 0 client.on('request', (err, { meta }) => { t.error(err) t.strictEqual( @@ -284,7 +284,7 @@ test('Should create a child client (name check)', t => { test('Should create a child client (auth check)', t => { t.plan(4) - var count = 0 + let count = 0 function handler (req, res) { if (count++ === 0) { t.match(req.headers, { authorization: 'Basic Zm9vOmJhcg==' }) diff --git a/test/unit/client.test.js b/test/unit/client.test.js index ac9cfe980..be61e1bf5 100644 --- a/test/unit/client.test.js +++ b/test/unit/client.test.js @@ -287,7 +287,7 @@ test('Authentication', t => { t.test('Custom basic authentication per request', t => { t.plan(6) - var first = true + let first = true function handler (req, res) { t.match(req.headers, { authorization: first ? 'hello' : 'Basic Zm9vOmJhcg==' @@ -322,7 +322,7 @@ test('Authentication', t => { t.test('Override default basic authentication per request', t => { t.plan(6) - var first = true + let first = true function handler (req, res) { t.match(req.headers, { authorization: first ? 'hello' : 'Basic Zm9vOmJhcg==' @@ -419,7 +419,7 @@ test('Authentication', t => { t.test('Custom ApiKey authentication per request', t => { t.plan(6) - var first = true + let first = true function handler (req, res) { t.match(req.headers, { authorization: first ? 'ApiKey Zm9vOmJhcg==' : 'Basic Zm9vOmJhcg==' @@ -454,7 +454,7 @@ test('Authentication', t => { t.test('Override default ApiKey authentication per request', t => { t.plan(6) - var first = true + let first = true function handler (req, res) { t.match(req.headers, { authorization: first ? 
'hello' : 'ApiKey Zm9vOmJhcg==' diff --git a/test/unit/connection-pool.test.js b/test/unit/connection-pool.test.js index bb40382db..6569f59f2 100644 --- a/test/unit/connection-pool.test.js +++ b/test/unit/connection-pool.test.js @@ -66,7 +66,7 @@ test('API', t => { t.test('markDead', t => { const pool = new ConnectionPool({ Connection, sniffEnabled: true }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + let connection = pool.addConnection(href) pool.markDead(connection) connection = pool.connections.find(c => c.id === href) t.strictEqual(connection.deadCount, 1) @@ -100,7 +100,7 @@ test('API', t => { t.test('markAlive', t => { const pool = new ConnectionPool({ Connection, sniffEnabled: true }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + let connection = pool.addConnection(href) pool.markDead(connection) pool.markAlive(connection) connection = pool.connections.find(c => c.id === href) @@ -121,7 +121,7 @@ test('API', t => { sniffEnabled: true }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + const connection = pool.addConnection(href) pool.markDead(connection) const opts = { now: Date.now() + 1000 * 60 * 3, @@ -147,7 +147,7 @@ test('API', t => { sniffEnabled: true }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + const connection = pool.addConnection(href) pool.markDead(connection) const opts = { now: Date.now() + 1000 * 60 * 3, @@ -175,7 +175,7 @@ test('API', t => { sniffEnabled: true }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + const connection = pool.addConnection(href) pool.markDead(connection) const opts = { now: Date.now() + 1000 * 60 * 3, @@ -200,7 +200,7 @@ test('API', t => { sniffEnabled: true }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + const connection = pool.addConnection(href) pool.markDead(connection) const opts = { now: Date.now() + 1000 * 60 * 3, @@ -292,7 +292,7 @@ test('API', t => { t.test('removeConnection', t => { const pool = new ConnectionPool({ Connection }) const href = 'http://localhost:9200/' - var connection = pool.addConnection(href) + const connection = pool.addConnection(href) t.ok(pool.getConnection() instanceof Connection) pool.removeConnection(connection) t.strictEqual(pool.getConnection(), null) diff --git a/test/unit/connection.test.js b/test/unit/connection.test.js index 9088e5438..6b1b2e653 100644 --- a/test/unit/connection.test.js +++ b/test/unit/connection.test.js @@ -58,7 +58,7 @@ test('Basic (http)', t => { connection: 'keep-alive' }) - var payload = '' + let payload = '' res.setEncoding('utf8') res.on('data', chunk => { payload += chunk }) res.on('error', err => t.fail(err)) @@ -98,7 +98,7 @@ test('Basic (https)', t => { connection: 'keep-alive' }) - var payload = '' + let payload = '' res.setEncoding('utf8') res.on('data', chunk => { payload += chunk }) res.on('error', err => t.fail(err)) @@ -139,7 +139,7 @@ test('Basic (https with ssl agent)', t => { connection: 'keep-alive' }) - var payload = '' + let payload = '' res.setEncoding('utf8') res.on('data', chunk => { payload += chunk }) res.on('error', err => t.fail(err)) @@ -193,7 +193,7 @@ test('Custom http agent', t => { connection: 'keep-alive' }) - var payload = '' + let payload = '' res.setEncoding('utf8') res.on('data', chunk => { payload += chunk }) res.on('error', err => t.fail(err)) @@ -317,7 +317,7 @@ test('Body request', t => { t.plan(2) function handler 
(req, res) { - var payload = '' + let payload = '' req.setEncoding('utf8') req.on('data', chunk => { payload += chunk }) req.on('error', err => t.fail(err)) @@ -346,7 +346,7 @@ test('Send body as buffer', t => { t.plan(2) function handler (req, res) { - var payload = '' + let payload = '' req.setEncoding('utf8') req.on('data', chunk => { payload += chunk }) req.on('error', err => t.fail(err)) @@ -375,7 +375,7 @@ test('Send body as stream', t => { t.plan(2) function handler (req, res) { - var payload = '' + let payload = '' req.setEncoding('utf8') req.on('data', chunk => { payload += chunk }) req.on('error', err => t.fail(err)) @@ -424,7 +424,7 @@ test('Should not close a connection if there are open requests', t => { t.error(err) t.strictEqual(connection._openRequests, 0) - var payload = '' + let payload = '' res.setEncoding('utf8') res.on('data', chunk => { payload += chunk }) res.on('error', err => t.fail(err)) @@ -461,7 +461,7 @@ test('Should not close a connection if there are open requests (with agent disab t.error(err) t.strictEqual(connection._openRequests, 0) - var payload = '' + let payload = '' res.setEncoding('utf8') res.on('data', chunk => { payload += chunk }) res.on('error', err => t.fail(err)) @@ -571,7 +571,7 @@ test('asStream set to true', t => { }, (err, res) => { t.error(err) - var payload = '' + let payload = '' res.setEncoding('utf8') res.on('data', chunk => { payload += chunk }) res.on('error', err => t.fail(err)) diff --git a/test/unit/events.test.js b/test/unit/events.test.js index 2842e7e46..cfaf20e29 100644 --- a/test/unit/events.test.js +++ b/test/unit/events.test.js @@ -20,7 +20,6 @@ 'use strict' const { test } = require('tap') -const semver = require('semver') const { Client, events } = require('../../index') const { TimeoutError } = require('../../lib/errors') const { @@ -127,7 +126,7 @@ test('Should emit a request event once when a request is performed', t => { }) }) -test('Remove an event', { skip: semver.lt(process.versions.node, '10.0.0') }, t => { +test('Remove an event', t => { t.plan(4) const client = new Client({ diff --git a/test/unit/helpers/bulk.test.js b/test/unit/helpers/bulk.test.js index 6fa9bf674..e99f3fba3 100644 --- a/test/unit/helpers/bulk.test.js +++ b/test/unit/helpers/bulk.test.js @@ -23,7 +23,6 @@ const { createReadStream } = require('fs') const { join } = require('path') const split = require('split2') const FakeTimers = require('@sinonjs/fake-timers') -const semver = require('semver') const { test } = require('tap') const { Client, errors } = require('../../../') const { buildServer, connection } = require('../../utils') @@ -308,10 +307,6 @@ test('bulk index', t => { }) t.test('Should perform a bulk request (retry)', async t => { - if (semver.lt(process.versions.node, '10.0.0')) { - t.skip('This test will not pass on Node v8') - return - } async function handler (req, res) { t.strictEqual(req.url, '/_bulk') t.match(req.headers, { 'content-type': 'application/x-ndjson' }) @@ -430,10 +425,6 @@ test('bulk index', t => { }) t.test('Should perform a bulk request (failure)', async t => { - if (semver.lt(process.versions.node, '10.0.0')) { - t.skip('This test will not pass on Node v8') - return - } async function handler (req, res) { t.strictEqual(req.url, '/_bulk') t.match(req.headers, { 'content-type': 'application/x-ndjson' }) @@ -575,10 +566,6 @@ test('bulk index', t => { }) t.test('Should abort a bulk request', async t => { - if (semver.lt(process.versions.node, '10.0.0')) { - t.skip('This test will not pass on Node v8') - return - } 
async function handler (req, res) { t.strictEqual(req.url, '/_bulk') t.match(req.headers, { 'content-type': 'application/x-ndjson' }) diff --git a/test/unit/helpers/scroll.test.js b/test/unit/helpers/scroll.test.js index dc4960149..cfc26d959 100644 --- a/test/unit/helpers/scroll.test.js +++ b/test/unit/helpers/scroll.test.js @@ -29,7 +29,7 @@ if (clientVersion.includes('-')) { const nodeVersion = process.versions.node test('Scroll search', async t => { - var count = 0 + let count = 0 const MockConnection = connection.buildMockConnection({ onRequest (params) { t.match(params.headers, { @@ -52,10 +52,10 @@ test('Scroll search', async t => { hits: count === 3 ? [] : [ - { _source: { one: 'one' } }, - { _source: { two: 'two' } }, - { _source: { three: 'three' } } - ] + { _source: { one: 'one' } }, + { _source: { two: 'two' } }, + { _source: { three: 'three' } } + ] } } } @@ -79,7 +79,7 @@ test('Scroll search', async t => { }) test('Clear a scroll search', async t => { - var count = 0 + let count = 0 const MockConnection = connection.buildMockConnection({ onRequest (params) { t.notMatch(params.headers, { @@ -129,7 +129,7 @@ test('Clear a scroll search', async t => { }) test('Scroll search (retry)', async t => { - var count = 0 + let count = 0 const MockConnection = connection.buildMockConnection({ onRequest (params) { count += 1 @@ -149,10 +149,10 @@ test('Scroll search (retry)', async t => { hits: count === 4 ? [] : [ - { _source: { one: 'one' } }, - { _source: { two: 'two' } }, - { _source: { three: 'three' } } - ] + { _source: { one: 'one' } }, + { _source: { two: 'two' } }, + { _source: { three: 'three' } } + ] } } } @@ -181,7 +181,7 @@ test('Scroll search (retry)', async t => { test('Scroll search (retry throws and maxRetries)', async t => { const maxRetries = 5 const expectedAttempts = maxRetries + 1 - var count = 0 + let count = 0 const MockConnection = connection.buildMockConnection({ onRequest (params) { count += 1 @@ -217,7 +217,7 @@ test('Scroll search (retry throws and maxRetries)', async t => { test('Scroll search (retry throws later)', async t => { const maxRetries = 5 const expectedAttempts = maxRetries + 2 - var count = 0 + let count = 0 const MockConnection = connection.buildMockConnection({ onRequest (params) { count += 1 @@ -268,7 +268,7 @@ test('Scroll search (retry throws later)', async t => { }) test('Scroll search documents', async t => { - var count = 0 + let count = 0 const MockConnection = connection.buildMockConnection({ onRequest (params) { if (count === 0) { @@ -287,10 +287,10 @@ test('Scroll search documents', async t => { hits: count === 3 ? [] : [ - { _source: { val: 1 * count } }, - { _source: { val: 2 * count } }, - { _source: { val: 3 * count } } - ] + { _source: { val: 1 * count } }, + { _source: { val: 2 * count } }, + { _source: { val: 3 * count } } + ] } } } @@ -321,7 +321,7 @@ test('Scroll search documents', async t => { test('Should not retry if maxRetries = 0', async t => { const maxRetries = 0 const expectedAttempts = 1 - var count = 0 + let count = 0 const MockConnection = connection.buildMockConnection({ onRequest (params) { count += 1 @@ -355,7 +355,7 @@ test('Should not retry if maxRetries = 0', async t => { }) test('Fix querystring for scroll search', async t => { - var count = 0 + let count = 0 const MockConnection = connection.buildMockConnection({ onRequest (params) { if (count === 0) { @@ -372,8 +372,8 @@ test('Fix querystring for scroll search', async t => { hits: count === 3 ? 
[] : [ - { _source: { val: count } } - ] + { _source: { val: count } } + ] } } } diff --git a/test/unit/selectors.test.js b/test/unit/selectors.test.js index 8985f90fe..798a457e0 100644 --- a/test/unit/selectors.test.js +++ b/test/unit/selectors.test.js @@ -27,7 +27,7 @@ test('RoundRobinSelector', t => { const arr = [0, 1, 2, 3, 4, 5] t.plan(arr.length + 1) - for (var i = 0; i <= arr.length; i++) { + for (let i = 0; i <= arr.length; i++) { t.strictEqual( selector(arr), i === arr.length ? arr[0] : arr[i] diff --git a/test/unit/transport.test.js b/test/unit/transport.test.js index 1026ba0a5..479f0419b 100644 --- a/test/unit/transport.test.js +++ b/test/unit/transport.test.js @@ -166,7 +166,7 @@ test('Send POST', t => { 'content-type': 'application/json', 'content-length': '17' }) - var json = '' + let json = '' req.setEncoding('utf8') req.on('data', chunk => { json += chunk }) req.on('error', err => t.fail(err)) @@ -217,7 +217,7 @@ test('Send POST (ndjson)', t => { 'content-type': 'application/x-ndjson', 'content-length': '67' }) - var json = '' + let json = '' req.setEncoding('utf8') req.on('data', chunk => { json += chunk }) req.on('error', err => t.fail(err)) @@ -265,7 +265,7 @@ test('Send stream', t => { t.match(req.headers, { 'content-type': 'application/json' }) - var json = '' + let json = '' req.setEncoding('utf8') req.on('data', chunk => { json += chunk }) req.on('error', err => t.fail(err)) @@ -308,7 +308,7 @@ test('Send stream (bulkBody)', t => { t.match(req.headers, { 'content-type': 'application/x-ndjson' }) - var json = '' + let json = '' req.setEncoding('utf8') req.on('data', chunk => { json += chunk }) req.on('error', err => t.fail(err)) @@ -587,7 +587,7 @@ test('ConnectionError (should call markDead on the failing connection)', t => { test('Retry mechanism', t => { t.plan(2) - var count = 0 + let count = 0 function handler (req, res) { res.setHeader('Content-Type', 'application/json;utf=8') if (count > 0) { @@ -635,7 +635,7 @@ test('Retry mechanism', t => { test('Should not retry if the body is a stream', t => { t.plan(2) - var count = 0 + let count = 0 function handler (req, res) { count++ res.setHeader('Content-Type', 'application/json;utf=8') @@ -680,7 +680,7 @@ test('Should not retry if the body is a stream', t => { test('Should not retry if the bulkBody is a stream', t => { t.plan(2) - var count = 0 + let count = 0 function handler (req, res) { count++ res.setHeader('Content-Type', 'application/json;utf=8') @@ -725,7 +725,7 @@ test('Should not retry if the bulkBody is a stream', t => { test('No retry', t => { t.plan(2) - var count = 0 + let count = 0 function handler (req, res) { count++ res.setHeader('Content-Type', 'application/json;utf=8') @@ -772,7 +772,7 @@ test('No retry', t => { test('Custom retry mechanism', t => { t.plan(2) - var count = 0 + let count = 0 function handler (req, res) { res.setHeader('Content-Type', 'application/json;utf=8') if (count > 0) { @@ -822,7 +822,7 @@ test('Custom retry mechanism', t => { test('Should not retry on 429', t => { t.plan(3) - var count = 0 + let count = 0 function handler (req, res) { t.strictEqual(count++, 0) res.statusCode = 429 @@ -988,7 +988,7 @@ test('Retry mechanism and abort', t => { id: 'node3' }]) - var count = 0 + let count = 0 const transport = new Transport({ emit: event => { if (event === 'request' && count++ > 0) { @@ -1294,7 +1294,7 @@ test('Should retry the request if the statusCode is 502/3/4', t => { t.test(statusCode, t => { t.plan(3) - var first = true + let first = true function handler (req, res) { if 
(first) { first = false @@ -1886,7 +1886,7 @@ test('asStream set to true', t => { 'content-type': 'application/json;utf=8' }) - var payload = '' + let payload = '' body.setEncoding('utf8') body.on('data', chunk => { payload += chunk }) body.on('error', err => t.fail(err)) @@ -1906,7 +1906,7 @@ test('Compress request', t => { 'content-type': 'application/json', 'content-encoding': 'gzip' }) - var json = '' + let json = '' req .pipe(createGunzip()) .on('data', chunk => { json += chunk }) @@ -1953,7 +1953,7 @@ test('Compress request', t => { 'content-type': 'application/json', 'content-encoding': 'gzip' }) - var json = '' + let json = '' req .pipe(createGunzip()) .on('data', chunk => { json += chunk }) @@ -1999,7 +1999,7 @@ test('Compress request', t => { 'content-type': 'application/json', 'content-encoding': 'gzip' }) - var json = '' + let json = '' req .pipe(createGunzip()) .on('data', chunk => { json += chunk }) @@ -2112,13 +2112,13 @@ test('Compress request', t => { t.test('Retry a gzipped body', t => { t.plan(7) - var count = 0 + let count = 0 function handler (req, res) { t.match(req.headers, { 'content-type': 'application/json', 'content-encoding': 'gzip' }) - var json = '' + let json = '' req .pipe(createGunzip()) .on('data', chunk => { json += chunk }) diff --git a/test/utils/MockConnection.js b/test/utils/MockConnection.js index 6a031f6e4..f714fdd30 100644 --- a/test/utils/MockConnection.js +++ b/test/utils/MockConnection.js @@ -30,7 +30,7 @@ const intoStream = require('into-stream') class MockConnection extends Connection { request (params, callback) { - var aborted = false + let aborted = false const stream = intoStream(JSON.stringify({ hello: 'world' })) stream.statusCode = setStatusCode(params.path) stream.headers = { @@ -54,7 +54,7 @@ class MockConnection extends Connection { class MockConnectionTimeout extends Connection { request (params, callback) { - var aborted = false + let aborted = false process.nextTick(() => { if (!aborted) { callback(new TimeoutError('Request timed out', params), null) @@ -70,7 +70,7 @@ class MockConnectionTimeout extends Connection { class MockConnectionError extends Connection { request (params, callback) { - var aborted = false + let aborted = false process.nextTick(() => { if (!aborted) { callback(new ConnectionError('Kaboom'), null) @@ -86,7 +86,7 @@ class MockConnectionError extends Connection { class MockConnectionSniff extends Connection { request (params, callback) { - var aborted = false + let aborted = false const sniffResult = { nodes: { 'node-1': { @@ -133,11 +133,11 @@ function buildMockConnection (opts) { class MockConnection extends Connection { request (params, callback) { - var { body, statusCode } = opts.onRequest(params) + let { body, statusCode } = opts.onRequest(params) if (typeof body !== 'string') { body = JSON.stringify(body) } - var aborted = false + let aborted = false const stream = intoStream(body) stream.statusCode = statusCode || 200 stream.headers = { diff --git a/test/utils/buildCluster.js b/test/utils/buildCluster.js index 2cfeea65d..95d20449e 100644 --- a/test/utils/buildCluster.js +++ b/test/utils/buildCluster.js @@ -23,7 +23,7 @@ const debug = require('debug')('elasticsearch-test') const workq = require('workq') const buildServer = require('./buildServer') -var id = 0 +let id = 0 function buildCluster (options, callback) { const clusterId = id++ debug(`Booting cluster '${clusterId}'`) @@ -37,7 +37,7 @@ function buildCluster (options, callback) { const sniffResult = { nodes: {} } options.numberOfNodes = 
options.numberOfNodes || 4 - for (var i = 0; i < options.numberOfNodes; i++) { + for (let i = 0; i < options.numberOfNodes; i++) { q.add(bootNode, { id: `node${i}` }) } diff --git a/test/utils/buildServer.js b/test/utils/buildServer.js index e21759bbc..b47b2fec2 100644 --- a/test/utils/buildServer.js +++ b/test/utils/buildServer.js @@ -35,7 +35,7 @@ const secureOpts = { cert: readFileSync(join(__dirname, '..', 'fixtures', 'https.cert'), 'utf8') } -var id = 0 +let id = 0 function buildServer (handler, opts, cb) { const serverId = id++ debug(`Booting server '${serverId}'`)
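The package.json hunk earlier in this patch tightens the `engines` field to `"node": ">=10"`, which npm only warns about by default (unless `engine-strict` is enabled). The sketch below shows one way a consumer or CI step could verify the running runtime against that range with the `semver` package, which this patch lists among the dev dependencies; the relative path to package.json and the error message are assumptions made for the example.

[source,js]
----
'use strict'

// Illustrative check (not part of the client): verify at startup or in CI
// that the current Node.js runtime satisfies the "engines" range declared
// in package.json. The relative path below is an assumption.
const semver = require('semver')
const { engines } = require('./package.json')

if (!semver.satisfies(process.versions.node, engines.node)) {
  console.error(
    `Node.js ${process.versions.node} does not satisfy the required range "${engines.node}"`
  )
  process.exit(1)
}
----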