diff --git a/.gitignore b/.gitignore
index 143024273..f8274fc48 100644
--- a/.gitignore
+++ b/.gitignore
@@ -46,3 +46,5 @@ package-lock.json
# elasticsearch repo or binary files
elasticsearch*
+
+api/generated.d.ts
diff --git a/index.d.ts b/index.d.ts
new file mode 100644
index 000000000..8cdb34cd8
--- /dev/null
+++ b/index.d.ts
@@ -0,0 +1,473 @@
+/// <reference types="node" />
+
+import { EventEmitter } from 'events';
+import { SecureContextOptions } from 'tls';
+import Transport from './lib/Transport';
+import Connection, { AgentOptions } from './lib/Connection';
+import ConnectionPool, { nodeSelectorFn, nodeFilterFn } from './lib/ConnectionPool';
+import Serializer from './lib/Serializer';
+import { ApiResponse } from './lib/Transport';
+
+declare type anyObject = {
+ [key: string]: any;
+};
+declare type callbackFn = (err: Error | null, result: ApiResponse) => void;
+declare type apiMethod = (params?: anyObject | callbackFn, callback?: callbackFn) => any;
+
+interface ClientOptions {
+ node?: string | string[];
+ nodes?: string | string[];
+ Connection?: typeof Connection;
+ ConnectionPool?: typeof ConnectionPool;
+ Transport?: typeof Transport;
+ Serializer?: typeof Serializer;
+ maxRetries?: number;
+ requestTimeout?: number;
+ pingTimeout?: number;
+ sniffInterval?: number;
+ sniffOnStart?: boolean;
+ sniffEndpoint?: string;
+ sniffOnConnectionFault?: boolean;
+ resurrectStrategy?: string;
+ randomizeHost?: boolean;
+ suggestCompression?: boolean;
+ ssl?: SecureContextOptions;
+ agent?: AgentOptions;
+ nodeFilter?: nodeFilterFn;
+ nodeSelector?: nodeSelectorFn | string;
+}
+
+declare class Client extends EventEmitter {
+ connectionPool: ConnectionPool;
+ transport: Transport;
+ serializer: Serializer
+ bulk: apiMethod
+ cat: {
+ aliases: apiMethod
+ allocation: apiMethod
+ count: apiMethod
+ fielddata: apiMethod
+ health: apiMethod
+ help: apiMethod
+ indices: apiMethod
+ master: apiMethod
+ nodeattrs: apiMethod
+ nodes: apiMethod
+ pending_tasks: apiMethod
+ pendingTasks: apiMethod
+ plugins: apiMethod
+ recovery: apiMethod
+ repositories: apiMethod
+ segments: apiMethod
+ shards: apiMethod
+ snapshots: apiMethod
+ tasks: apiMethod
+ templates: apiMethod
+ thread_pool: apiMethod
+ threadPool: apiMethod
+ }
+ ccr: {
+ delete_auto_follow_pattern: apiMethod
+ deleteAutoFollowPattern: apiMethod
+ follow: apiMethod
+ follow_stats: apiMethod
+ followStats: apiMethod
+ get_auto_follow_pattern: apiMethod
+ getAutoFollowPattern: apiMethod
+ pause_follow: apiMethod
+ pauseFollow: apiMethod
+ put_auto_follow_pattern: apiMethod
+ putAutoFollowPattern: apiMethod
+ resume_follow: apiMethod
+ resumeFollow: apiMethod
+ stats: apiMethod
+ unfollow: apiMethod
+ }
+ clear_scroll: apiMethod
+ clearScroll: apiMethod
+ cluster: {
+ allocation_explain: apiMethod
+ allocationExplain: apiMethod
+ get_settings: apiMethod
+ getSettings: apiMethod
+ health: apiMethod
+ pending_tasks: apiMethod
+ pendingTasks: apiMethod
+ put_settings: apiMethod
+ putSettings: apiMethod
+ remote_info: apiMethod
+ remoteInfo: apiMethod
+ reroute: apiMethod
+ state: apiMethod
+ stats: apiMethod
+ }
+ count: apiMethod
+ create: apiMethod
+ delete: apiMethod
+ delete_by_query: apiMethod
+ deleteByQuery: apiMethod
+ delete_by_query_rethrottle: apiMethod
+ deleteByQueryRethrottle: apiMethod
+ delete_script: apiMethod
+ deleteScript: apiMethod
+ exists: apiMethod
+ exists_source: apiMethod
+ existsSource: apiMethod
+ explain: apiMethod
+ field_caps: apiMethod
+ fieldCaps: apiMethod
+ get: apiMethod
+ get_script: apiMethod
+ getScript: apiMethod
+ get_source: apiMethod
+ getSource: apiMethod
+ index: apiMethod
+ indices: {
+ analyze: apiMethod
+ clear_cache: apiMethod
+ clearCache: apiMethod
+ close: apiMethod
+ create: apiMethod
+ delete: apiMethod
+ delete_alias: apiMethod
+ deleteAlias: apiMethod
+ delete_template: apiMethod
+ deleteTemplate: apiMethod
+ exists: apiMethod
+ exists_alias: apiMethod
+ existsAlias: apiMethod
+ exists_template: apiMethod
+ existsTemplate: apiMethod
+ exists_type: apiMethod
+ existsType: apiMethod
+ flush: apiMethod
+ flush_synced: apiMethod
+ flushSynced: apiMethod
+ forcemerge: apiMethod
+ get: apiMethod
+ get_alias: apiMethod
+ getAlias: apiMethod
+ get_field_mapping: apiMethod
+ getFieldMapping: apiMethod
+ get_mapping: apiMethod
+ getMapping: apiMethod
+ get_settings: apiMethod
+ getSettings: apiMethod
+ get_template: apiMethod
+ getTemplate: apiMethod
+ get_upgrade: apiMethod
+ getUpgrade: apiMethod
+ open: apiMethod
+ put_alias: apiMethod
+ putAlias: apiMethod
+ put_mapping: apiMethod
+ putMapping: apiMethod
+ put_settings: apiMethod
+ putSettings: apiMethod
+ put_template: apiMethod
+ putTemplate: apiMethod
+ recovery: apiMethod
+ refresh: apiMethod
+ rollover: apiMethod
+ segments: apiMethod
+ shard_stores: apiMethod
+ shardStores: apiMethod
+ shrink: apiMethod
+ split: apiMethod
+ stats: apiMethod
+ update_aliases: apiMethod
+ updateAliases: apiMethod
+ upgrade: apiMethod
+ validate_query: apiMethod
+ validateQuery: apiMethod
+ }
+ info: apiMethod
+ ingest: {
+ delete_pipeline: apiMethod
+ deletePipeline: apiMethod
+ get_pipeline: apiMethod
+ getPipeline: apiMethod
+ processor_grok: apiMethod
+ processorGrok: apiMethod
+ put_pipeline: apiMethod
+ putPipeline: apiMethod
+ simulate: apiMethod
+ }
+ mget: apiMethod
+ msearch: apiMethod
+ msearch_template: apiMethod
+ msearchTemplate: apiMethod
+ mtermvectors: apiMethod
+ nodes: {
+ hot_threads: apiMethod
+ hotThreads: apiMethod
+ info: apiMethod
+ reload_secure_settings: apiMethod
+ reloadSecureSettings: apiMethod
+ stats: apiMethod
+ usage: apiMethod
+ }
+ ping: apiMethod
+ put_script: apiMethod
+ putScript: apiMethod
+ rank_eval: apiMethod
+ rankEval: apiMethod
+ reindex: apiMethod
+ reindex_rethrottle: apiMethod
+ reindexRethrottle: apiMethod
+ render_search_template: apiMethod
+ renderSearchTemplate: apiMethod
+ scripts_painless_execute: apiMethod
+ scriptsPainlessExecute: apiMethod
+ scroll: apiMethod
+ search: apiMethod
+ search_shards: apiMethod
+ searchShards: apiMethod
+ search_template: apiMethod
+ searchTemplate: apiMethod
+ snapshot: {
+ create: apiMethod
+ create_repository: apiMethod
+ createRepository: apiMethod
+ delete: apiMethod
+ delete_repository: apiMethod
+ deleteRepository: apiMethod
+ get: apiMethod
+ get_repository: apiMethod
+ getRepository: apiMethod
+ restore: apiMethod
+ status: apiMethod
+ verify_repository: apiMethod
+ verifyRepository: apiMethod
+ }
+ tasks: {
+ cancel: apiMethod
+ get: apiMethod
+ list: apiMethod
+ }
+ termvectors: apiMethod
+ update: apiMethod
+ update_by_query: apiMethod
+ updateByQuery: apiMethod
+ update_by_query_rethrottle: apiMethod
+ updateByQueryRethrottle: apiMethod
+ xpack: {
+ graph: {
+ explore: apiMethod
+ }
+ info: apiMethod
+ license: {
+ delete: apiMethod
+ get: apiMethod
+ get_basic_status: apiMethod
+ getBasicStatus: apiMethod
+ get_trial_status: apiMethod
+ getTrialStatus: apiMethod
+ post: apiMethod
+ post_start_basic: apiMethod
+ postStartBasic: apiMethod
+ post_start_trial: apiMethod
+ postStartTrial: apiMethod
+ }
+ migration: {
+ deprecations: apiMethod
+ get_assistance: apiMethod
+ getAssistance: apiMethod
+ upgrade: apiMethod
+ }
+ ml: {
+ close_job: apiMethod
+ closeJob: apiMethod
+ delete_calendar: apiMethod
+ deleteCalendar: apiMethod
+ delete_calendar_event: apiMethod
+ deleteCalendarEvent: apiMethod
+ delete_calendar_job: apiMethod
+ deleteCalendarJob: apiMethod
+ delete_datafeed: apiMethod
+ deleteDatafeed: apiMethod
+ delete_expired_data: apiMethod
+ deleteExpiredData: apiMethod
+ delete_filter: apiMethod
+ deleteFilter: apiMethod
+ delete_forecast: apiMethod
+ deleteForecast: apiMethod
+ delete_job: apiMethod
+ deleteJob: apiMethod
+ delete_model_snapshot: apiMethod
+ deleteModelSnapshot: apiMethod
+ find_file_structure: apiMethod
+ findFileStructure: apiMethod
+ flush_job: apiMethod
+ flushJob: apiMethod
+ forecast: apiMethod
+ get_buckets: apiMethod
+ getBuckets: apiMethod
+ get_calendar_events: apiMethod
+ getCalendarEvents: apiMethod
+ get_calendars: apiMethod
+ getCalendars: apiMethod
+ get_categories: apiMethod
+ getCategories: apiMethod
+ get_datafeed_stats: apiMethod
+ getDatafeedStats: apiMethod
+ get_datafeeds: apiMethod
+ getDatafeeds: apiMethod
+ get_filters: apiMethod
+ getFilters: apiMethod
+ get_influencers: apiMethod
+ getInfluencers: apiMethod
+ get_job_stats: apiMethod
+ getJobStats: apiMethod
+ get_jobs: apiMethod
+ getJobs: apiMethod
+ get_model_snapshots: apiMethod
+ getModelSnapshots: apiMethod
+ get_overall_buckets: apiMethod
+ getOverallBuckets: apiMethod
+ get_records: apiMethod
+ getRecords: apiMethod
+ info: apiMethod
+ open_job: apiMethod
+ openJob: apiMethod
+ post_calendar_events: apiMethod
+ postCalendarEvents: apiMethod
+ post_data: apiMethod
+ postData: apiMethod
+ preview_datafeed: apiMethod
+ previewDatafeed: apiMethod
+ put_calendar: apiMethod
+ putCalendar: apiMethod
+ put_calendar_job: apiMethod
+ putCalendarJob: apiMethod
+ put_datafeed: apiMethod
+ putDatafeed: apiMethod
+ put_filter: apiMethod
+ putFilter: apiMethod
+ put_job: apiMethod
+ putJob: apiMethod
+ revert_model_snapshot: apiMethod
+ revertModelSnapshot: apiMethod
+ start_datafeed: apiMethod
+ startDatafeed: apiMethod
+ stop_datafeed: apiMethod
+ stopDatafeed: apiMethod
+ update_datafeed: apiMethod
+ updateDatafeed: apiMethod
+ update_filter: apiMethod
+ updateFilter: apiMethod
+ update_job: apiMethod
+ updateJob: apiMethod
+ update_model_snapshot: apiMethod
+ updateModelSnapshot: apiMethod
+ validate: apiMethod
+ validate_detector: apiMethod
+ validateDetector: apiMethod
+ }
+ monitoring: {
+ bulk: apiMethod
+ }
+ rollup: {
+ delete_job: apiMethod
+ deleteJob: apiMethod
+ get_jobs: apiMethod
+ getJobs: apiMethod
+ get_rollup_caps: apiMethod
+ getRollupCaps: apiMethod
+ get_rollup_index_caps: apiMethod
+ getRollupIndexCaps: apiMethod
+ put_job: apiMethod
+ putJob: apiMethod
+ rollup_search: apiMethod
+ rollupSearch: apiMethod
+ start_job: apiMethod
+ startJob: apiMethod
+ stop_job: apiMethod
+ stopJob: apiMethod
+ }
+ security: {
+ authenticate: apiMethod
+ change_password: apiMethod
+ changePassword: apiMethod
+ clear_cached_realms: apiMethod
+ clearCachedRealms: apiMethod
+ clear_cached_roles: apiMethod
+ clearCachedRoles: apiMethod
+ delete_privileges: apiMethod
+ deletePrivileges: apiMethod
+ delete_role: apiMethod
+ deleteRole: apiMethod
+ delete_role_mapping: apiMethod
+ deleteRoleMapping: apiMethod
+ delete_user: apiMethod
+ deleteUser: apiMethod
+ disable_user: apiMethod
+ disableUser: apiMethod
+ enable_user: apiMethod
+ enableUser: apiMethod
+ get_privileges: apiMethod
+ getPrivileges: apiMethod
+ get_role: apiMethod
+ getRole: apiMethod
+ get_role_mapping: apiMethod
+ getRoleMapping: apiMethod
+ get_token: apiMethod
+ getToken: apiMethod
+ get_user: apiMethod
+ getUser: apiMethod
+ get_user_privileges: apiMethod
+ getUserPrivileges: apiMethod
+ has_privileges: apiMethod
+ hasPrivileges: apiMethod
+ invalidate_token: apiMethod
+ invalidateToken: apiMethod
+ put_privileges: apiMethod
+ putPrivileges: apiMethod
+ put_role: apiMethod
+ putRole: apiMethod
+ put_role_mapping: apiMethod
+ putRoleMapping: apiMethod
+ put_user: apiMethod
+ putUser: apiMethod
+ }
+ sql: {
+ clear_cursor: apiMethod
+ clearCursor: apiMethod
+ query: apiMethod
+ translate: apiMethod
+ }
+ ssl: {
+ certificates: apiMethod
+ }
+ usage: apiMethod
+ watcher: {
+ ack_watch: apiMethod
+ ackWatch: apiMethod
+ activate_watch: apiMethod
+ activateWatch: apiMethod
+ deactivate_watch: apiMethod
+ deactivateWatch: apiMethod
+ delete_watch: apiMethod
+ deleteWatch: apiMethod
+ execute_watch: apiMethod
+ executeWatch: apiMethod
+ get_watch: apiMethod
+ getWatch: apiMethod
+ put_watch: apiMethod
+ putWatch: apiMethod
+ restart: apiMethod
+ start: apiMethod
+ stats: apiMethod
+ stop: apiMethod
+ }
+ }
+ constructor(opts?: ClientOptions);
+}
+
+declare const events: {
+ RESPONSE: string;
+ REQUEST: string;
+ ERROR: string;
+};
+
+export { Client, Transport, ConnectionPool, Connection, Serializer, events, ApiResponse };
diff --git a/index.js b/index.js
index cc5055da6..7765ae8fb 100644
--- a/index.js
+++ b/index.js
@@ -5,17 +5,10 @@ const Transport = require('./lib/Transport')
const Connection = require('./lib/Connection')
const ConnectionPool = require('./lib/ConnectionPool')
const Serializer = require('./lib/Serializer')
-const symbols = require('./lib/symbols')
const { ConfigurationError } = require('./lib/errors')
const buildApi = require('./api')
-const {
- kTransport,
- kConnectionPool,
- kSerializer
-} = symbols
-
class Client extends EventEmitter {
constructor (opts = {}) {
super()
@@ -57,8 +50,8 @@ class Client extends EventEmitter {
nodeSelector: 'round-robin'
}, opts)
- this[kSerializer] = new options.Serializer()
- this[kConnectionPool] = new options.ConnectionPool({
+ this.serializer = new options.Serializer()
+ this.connectionPool = new options.ConnectionPool({
pingTimeout: options.pingTimeout,
resurrectStrategy: options.resurrectStrategy,
randomizeHost: options.randomizeHost,
@@ -71,12 +64,12 @@ class Client extends EventEmitter {
})
// Add the connections before initialize the Transport
- this[kConnectionPool].addConnection(options.node || options.nodes)
+ this.connectionPool.addConnection(options.node || options.nodes)
- this[kTransport] = new options.Transport({
+ this.transport = new options.Transport({
emit: this.emit.bind(this),
- connectionPool: this[kConnectionPool],
- serializer: this[kSerializer],
+ connectionPool: this.connectionPool,
+ serializer: this.serializer,
maxRetries: options.maxRetries,
requestTimeout: options.requestTimeout,
sniffInterval: options.sniffInterval,
@@ -86,10 +79,8 @@ class Client extends EventEmitter {
suggestCompression: options.suggestCompression
})
- this.request = this[kTransport].request.bind(this[kTransport])
-
const apis = buildApi({
- makeRequest: this[kTransport].request.bind(this[kTransport]),
+ makeRequest: this.transport.request.bind(this.transport),
result: { body: null, statusCode: null, headers: null, warnings: null },
ConfigurationError
})
@@ -112,6 +103,5 @@ module.exports = {
ConnectionPool,
Connection,
Serializer,
- symbols,
events
}
diff --git a/lib/Connection.d.ts b/lib/Connection.d.ts
new file mode 100644
index 000000000..f746940de
--- /dev/null
+++ b/lib/Connection.d.ts
@@ -0,0 +1,56 @@
+/// <reference types="node" />
+
+import { URL } from 'url';
+import * as http from 'http';
+import { SecureContextOptions } from 'tls';
+
+interface ConnectionOptions {
+ url: URL;
+ ssl?: SecureContextOptions;
+ id?: string;
+ headers?: any;
+ agent?: AgentOptions;
+ status?: string;
+ roles?: any;
+}
+
+export interface AgentOptions {
+ keepAlive: boolean;
+ keepAliveMsecs: number;
+ maxSockets: number;
+ maxFreeSockets: number;
+}
+
+export default class Connection {
+ static statuses: {
+ ALIVE: string;
+ DEAD: string;
+ };
+ static roles: {
+ MASTER: string;
+ DATA: string;
+ INGEST: string;
+ COORDINATING: string;
+ MACHINE_LEARNING: string;
+ };
+ url: URL;
+ ssl: SecureContextOptions | null;
+ id: string;
+ headers: any;
+ deadCount: number;
+ resurrectTimeout: number;
+ statuses: any;
+ roles: any;
+ makeRequest: any;
+ _openRequests: number;
+ _status: string;
+ _agent: http.Agent;
+ constructor(opts?: ConnectionOptions);
+ request(params: any, callback: (err: Error | null, response: http.IncomingMessage | null) => void): http.ClientRequest;
+ close(): Connection;
+ setRole(role: string, enabled: boolean): Connection;
+ status: string;
+ buildRequestObject(params: any): http.ClientRequestArgs;
+}
+
+export {};
diff --git a/lib/ConnectionPool.d.ts b/lib/ConnectionPool.d.ts
new file mode 100644
index 000000000..e4ad414c6
--- /dev/null
+++ b/lib/ConnectionPool.d.ts
@@ -0,0 +1,137 @@
+/// <reference types="node" />
+
+import { SecureContextOptions } from 'tls';
+import Connection, { AgentOptions } from './Connection';
+
+export interface nodeSelectorFn {
+ (connections: Connection[]): Connection;
+}
+
+export interface nodeFilterFn {
+ (connection: Connection): boolean;
+}
+
+interface ConnectionPoolOptions {
+ ssl?: SecureContextOptions;
+ agent?: AgentOptions;
+ pingTimeout?: number;
+ randomizeHost?: boolean;
+ Connection: typeof Connection;
+ resurrectStrategy?: string;
+ nodeFilter?: nodeFilterFn;
+ nodeSelector?: string | nodeSelectorFn;
+}
+
+export interface getConnectionOptions {
+ filter?: nodeFilterFn;
+ selector?: nodeSelectorFn;
+}
+
+export default class ConnectionPool {
+ static resurrectStrategies: {
+ none: number;
+ ping: number;
+ optimistic: number;
+ };
+ connections: any;
+ dead: string[];
+ _ssl: SecureContextOptions | null;
+ _agent: AgentOptions | null;
+ resurrectTimeout: number;
+ resurrectTimeoutCutoff: number;
+ pingTimeout: number;
+ randomizeHost: boolean;
+ nodeFilter: nodeFilterFn;
+ nodeSelector: nodeSelectorFn;
+ Connection: typeof Connection;
+ resurrectStrategy: number;
+ constructor(opts?: ConnectionPoolOptions);
+ /**
+ * Marks a connection as 'alive'.
+ * If needed removes the connection from the dead list
+ * and then resets the `deadCount`.
+ *
+ * @param {object} connection
+ */
+ markAlive(connection: Connection): void;
+ /**
+ * Marks a connection as 'dead'.
+ * If needed adds the connection to the dead list
+ * and then increments the `deadCount`.
+ *
+ * @param {object} connection
+ */
+ markDead(connection: Connection): void;
+ /**
+ * If enabled, tries to resurrect a connection with the given
+ * resurrect strategy ('ping', 'optimistic', 'none').
+ *
+ * @param {number} epoch
+ * @param {function} callback (isAlive, connection)
+ */
+ resurrect(now?: number, callback?: (isAlive: boolean | null, connection: Connection | null) => void): void;
+ /**
+ * Returns an alive connection if present,
+ * otherwise returns null.
+ * By default it filters the `master` only nodes.
+ * It uses the selector to choose which
+ * connection return.
+ *
+ * @param {object} options (filter and selector)
+ * @returns {object|null} connection
+ */
+ getConnection(opts?: getConnectionOptions): Connection | null;
+ /**
+ * Adds a new connection to the pool.
+ *
+ * @param {object|string} host
+ * @returns {ConnectionPool}
+ */
+ addConnection(opts: any): Connection | void;
+ /**
+ * Removes a new connection to the pool.
+ *
+ * @param {object} connection
+ * @returns {ConnectionPool}
+ */
+ removeConnection(connection: Connection): ConnectionPool;
+ /**
+ * Empties the connection pool.
+ *
+ * @returns {ConnectionPool}
+ */
+ empty(): ConnectionPool;
+ /**
+ * Update the ConnectionPool with new connections.
+ *
+ * @param {array} array of connections
+ * @returns {ConnectionPool}
+ */
+ update(connections: Connection[]): ConnectionPool;
+ /**
+ * Transforms the nodes objects to a host object.
+ *
+ * @param {object} nodes
+ * @returns {array} hosts
+ */
+ nodesToHost(nodes: any): any[];
+ /**
+ * Transforms an url string to a host object
+ *
+ * @param {string} url
+ * @returns {object} host
+ */
+ urlToHost(url: string): any;
+}
+
+declare function defaultNodeFilter(node: Connection): boolean;
+declare function roundRobinSelector(): (connections: Connection[]) => Connection;
+declare function randomSelector(connections: Connection[]): Connection;
+
+export declare const internals: {
+ defaultNodeFilter: typeof defaultNodeFilter;
+ roundRobinSelector: typeof roundRobinSelector;
+ randomSelector: typeof randomSelector;
+};
+
+export {};
diff --git a/lib/ConnectionPool.js b/lib/ConnectionPool.js
index 47e461751..32dacfcca 100644
--- a/lib/ConnectionPool.js
+++ b/lib/ConnectionPool.js
@@ -330,9 +330,9 @@ ConnectionPool.resurrectStrategies = {
function defaultNodeFilter (node) {
// avoid master only nodes
- if (!!node.master === true &&
- !!node.data === false &&
- !!node.ingest === false) {
+ if (!!node.roles.master === true &&
+ !!node.roles.data === false &&
+ !!node.roles.ingest === false) {
return false
}
return true
diff --git a/lib/Serializer.d.ts b/lib/Serializer.d.ts
new file mode 100644
index 000000000..0a05e431e
--- /dev/null
+++ b/lib/Serializer.d.ts
@@ -0,0 +1,6 @@
+export default class Serializer {
+ serialize(object: any): string;
+ deserialize(json: string): any;
+ ndserialize(array: any[]): string;
+ qserialize(object: any): string;
+}
diff --git a/lib/Transport.d.ts b/lib/Transport.d.ts
new file mode 100644
index 000000000..8839ce3eb
--- /dev/null
+++ b/lib/Transport.d.ts
@@ -0,0 +1,47 @@
+import ConnectionPool from './ConnectionPool';
+import Connection from './Connection';
+import Serializer from './Serializer';
+
+declare type noopFn = (...args: any[]) => void;
+declare type emitFn = (event: string | symbol, ...args: any[]) => boolean;
+
+interface TransportOptions {
+ emit: emitFn & noopFn;
+ connectionPool: ConnectionPool;
+ serializer: Serializer;
+ maxRetries: number;
+ requestTimeout: number | string;
+ suggestCompression: boolean;
+ sniffInterval: number;
+ sniffOnConnectionFault: boolean;
+ sniffEndpoint: string;
+ sniffOnStart: boolean;
+}
+
+export interface ApiResponse {
+ body: any;
+ statusCode: number | null;
+ headers: any;
+ warnings: any[] | null;
+}
+
+export default class Transport {
+ emit: emitFn & noopFn;
+ connectionPool: ConnectionPool;
+ serializer: Serializer;
+ maxRetries: number;
+ requestTimeout: number;
+ suggestCompression: boolean;
+ sniffInterval: number;
+ sniffOnConnectionFault: boolean;
+ sniffEndpoint: string;
+ _sniffEnabled: boolean;
+ _nextSniff: number;
+ _isSniffing: boolean;
+ constructor(opts: TransportOptions);
+ request(params: any, callback: (err: Error | null, result: ApiResponse) => void): any;
+ getConnection(): Connection | null;
+ sniff(callback?: (...args: any[]) => void): void;
+}
+
+export {};
diff --git a/lib/errors.d.ts b/lib/errors.d.ts
new file mode 100644
index 000000000..ec5a9f6b3
--- /dev/null
+++ b/lib/errors.d.ts
@@ -0,0 +1,48 @@
+export declare class TimeoutError extends Error {
+ name: string;
+ message: string;
+ request: any;
+ constructor(message: string, request: any);
+}
+
+export declare class ConnectionError extends Error {
+ name: string;
+ message: string;
+ request: any;
+ constructor(message: string, request: any);
+}
+
+export declare class NoLivingConnectionsError extends Error {
+ name: string;
+ message: string;
+ constructor(message: string);
+}
+
+export declare class SerializationError extends Error {
+ name: string;
+ message: string;
+ constructor(message: string);
+}
+
+export declare class DeserializationError extends Error {
+ name: string;
+ message: string;
+ constructor(message: string);
+}
+
+export declare class ConfigurationError extends Error {
+ name: string;
+ message: string;
+ constructor(message: string);
+}
+
+export declare class ResponseError extends Error {
+ name: string;
+ message: string;
+ body: any;
+ statusCode: number;
+ headers: any;
+ constructor({ body, statusCode, headers }: {
+ [key: string]: any;
+ });
+}
diff --git a/lib/symbols.js b/lib/symbols.js
deleted file mode 100644
index 313b6d4ce..000000000
--- a/lib/symbols.js
+++ /dev/null
@@ -1,15 +0,0 @@
-'use strict'
-
-const kTransport = Symbol('elasticsearch-transport')
-const kConnection = Symbol('elasticsearch-connection')
-const kConnectionPool = Symbol('elasticsearch-connection-pool')
-const kSerializer = Symbol('elasticsearch-serializer')
-const kSelector = Symbol('elasticsearch-selector')
-
-module.exports = {
- kTransport,
- kConnection,
- kConnectionPool,
- kSerializer,
- kSelector
-}
diff --git a/package.json b/package.json
index f6ed4979b..067af3ba8 100644
--- a/package.json
+++ b/package.json
@@ -15,9 +15,10 @@
"index"
],
"scripts": {
- "test": "npm run lint && npm run test:unit",
+ "test": "npm run lint && npm run test:unit && npm run test:types",
"test:unit": "tap test/unit/*.test.js -J -T",
"test:integration": "tap test/integration/index.js -T --harmony",
+ "test:types": "tsc --project ./test/types/tsconfig.json",
"lint": "standard",
"lint:fix": "standard --fix",
"generate": "node scripts/run.js",
@@ -33,6 +34,7 @@
"company": "Elasticsearch BV"
},
"devDependencies": {
+ "@types/node": "^10.12.10",
"dedent": "^0.7.0",
"deepmerge": "^2.2.1",
"into-stream": "^4.0.0",
@@ -45,6 +47,7 @@
"standard": "^12.0.0",
"stoppable": "^1.0.7",
"tap": "^12.0.1",
+ "typescript": "^3.1.6",
"workq": "^2.1.0"
},
"dependencies": {
diff --git a/scripts/run.js b/scripts/run.js
index 0dc1ec61b..7776efba5 100644
--- a/scripts/run.js
+++ b/scripts/run.js
@@ -22,6 +22,7 @@ function start (opts) {
const packageFolder = join(__dirname, '..', 'api')
const apiOutputFolder = join(packageFolder, 'api')
const mainOutputFile = join(packageFolder, 'index.js')
+ const typesOutputFile = join(packageFolder, 'generated.d.ts')
log.text = 'Cleaning API folder...'
rimraf.sync(join(apiOutputFolder, '*.js'))
@@ -35,9 +36,15 @@ function start (opts) {
readdirSync(apiFolder).forEach(generateApiFile(apiFolder, log))
readdirSync(xPackFolder).forEach(generateApiFile(xPackFolder, log))
+ const { fn: factory, types } = genFactory(apiOutputFolder)
writeFileSync(
mainOutputFile,
- genFactory(apiOutputFolder),
+ factory,
+ { encoding: 'utf8' }
+ )
+ writeFileSync(
+ typesOutputFile,
+ types,
{ encoding: 'utf8' }
)
lintFiles(log)
@@ -69,6 +76,7 @@ function start (opts) {
return log.fail(err.message)
}
log.succeed('Done!')
+ console.log('Remember to copy the generated types into the index.d.ts file')
})
}
}
diff --git a/scripts/utils/genMain.js b/scripts/utils/genMain.js
index 79d98fa66..419daf468 100644
--- a/scripts/utils/genMain.js
+++ b/scripts/utils/genMain.js
@@ -7,6 +7,28 @@ const deepmerge = require('deepmerge')
function genFactory (folder) {
// get all the API files
const apiFiles = readdirSync(folder)
+ const types = apiFiles
+ .map(file => {
+ return file
+ .slice(0, -3) // remove `.js` extension
+ .split('.')
+ .reverse()
+ .reduce((acc, val) => {
+ const obj = {
+ [val]: acc === null
+ ? 'apiMethod'
+ : acc
+ }
+ if (isSnakeCased(val)) {
+ obj[camelify(val)] = acc === null
+ ? 'apiMethod'
+ : acc
+ }
+ return obj
+ }, null)
+ })
+ .reduce((acc, val) => deepmerge(acc, val), {})
+
const apis = apiFiles
.map(file => {
// const name = format(file.slice(0, -3))
@@ -38,6 +60,14 @@ function genFactory (folder) {
// remove useless quotes
.replace(/"/g, '')
+ // serialize the type object
+ const typesStr = Object.keys(types)
+ .map(key => `${key}: ${JSON.stringify(types[key], null, 2)}`)
+ .join('\n')
+ // remove useless quotes and commas
+ .replace(/"/g, '')
+ .replace(/,/g, '')
+
const fn = dedent`
'use strict'
@@ -75,7 +105,7 @@ function genFactory (folder) {
`
// new line at the end of file
- return fn + '\n'
+ return { fn: fn + '\n', types: typesStr }
}
// from snake_case to camelCase
diff --git a/test/types/index.ts b/test/types/index.ts
new file mode 100644
index 000000000..ad9af76c2
--- /dev/null
+++ b/test/types/index.ts
@@ -0,0 +1,29 @@
+'use strict'
+
+import { Client, ApiResponse } from '../../index'
+
+const client = new Client({ node: 'http://localhost:9200' })
+
+// Callbacks
+client.info((err: Error | null, result: ApiResponse) => {})
+
+client.index({
+ index: 'test',
+ type: 'test',
+ id: 'test',
+ body: { hello: 'world' }
+}, (err: Error | null, result: ApiResponse) => {})
+
+// Promises
+client.info()
+ .then((result: ApiResponse) => {})
+ .catch((err: Error) => {})
+
+client.index({
+ index: 'test',
+ type: 'test',
+ id: 'test',
+ body: { hello: 'world' }
+})
+ .then((result: ApiResponse) => {})
+ .catch((err: Error) => {})
diff --git a/test/types/tsconfig.json b/test/types/tsconfig.json
new file mode 100644
index 000000000..46f366eb2
--- /dev/null
+++ b/test/types/tsconfig.json
@@ -0,0 +1,11 @@
+{
+ "compilerOptions": {
+ "target": "es6",
+ "module": "commonjs",
+ "noEmit": true,
+ "strict": true
+ },
+ "files": [
+ "./index.ts"
+ ]
+}
diff --git a/test/unit/client.test.js b/test/unit/client.test.js
index e59d83cab..95a87ffb4 100644
--- a/test/unit/client.test.js
+++ b/test/unit/client.test.js
@@ -2,15 +2,14 @@
const { test } = require('tap')
const { URL } = require('url')
-const { Client, symbols } = require('../../index')
-const { kConnectionPool } = symbols
+const { Client } = require('../../index')
test('Configure host', t => {
t.test('Single string', t => {
const client = new Client({
node: 'http://localhost:9200'
})
- const pool = client[kConnectionPool]
+ const pool = client.connectionPool
t.match(pool.connections.get('http://localhost:9200/'), {
url: new URL('http://localhost:9200'),
id: 'http://localhost:9200/',
@@ -32,7 +31,7 @@ test('Configure host', t => {
const client = new Client({
nodes: ['http://localhost:9200', 'http://localhost:9201']
})
- const pool = client[kConnectionPool]
+ const pool = client.connectionPool
t.match(pool.connections.get('http://localhost:9200/'), {
url: new URL('http://localhost:9200'),
id: 'http://localhost:9200/',
@@ -77,7 +76,7 @@ test('Configure host', t => {
ssl: 'ssl'
}
})
- const pool = client[kConnectionPool]
+ const pool = client.connectionPool
t.match(pool.connections.get('node'), {
url: new URL('http://localhost:9200'),
id: 'node',
@@ -112,7 +111,7 @@ test('Configure host', t => {
ssl: 'ssl'
}]
})
- const pool = client[kConnectionPool]
+ const pool = client.connectionPool
t.match(pool.connections.get('node1'), {
url: new URL('http://localhost:9200'),
id: 'node1',
@@ -146,7 +145,7 @@ test('Configure host', t => {
id: 'node'
}
})
- const pool = client[kConnectionPool]
+ const pool = client.connectionPool
t.match(pool.connections.get('node'), {
url: new URL('http://localhost:9200'),
headers: { 'x-foo': 'bar' }