API generation

delvedor
2021-01-18 17:15:53 +01:00
parent a91c55b1f8
commit b0769bec11
10 changed files with 246 additions and 158 deletions


@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['format', 'local', 'h', 'help', 's', 'v', 'expand_wildcards', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'bytes', 'master_timeout', 'fields', 'time', 'ts', 'health', 'pri', 'include_unloaded_segments', 'full_id', 'active_only', 'detailed', 'index', 'ignore_unavailable', 'node_id', 'actions', 'parent_task', 'allow_no_match', 'allow_no_datafeeds', 'allow_no_jobs', 'from', 'size']
const snakeCase = { expandWildcards: 'expand_wildcards', errorTrace: 'error_trace', filterPath: 'filter_path', masterTimeout: 'master_timeout', includeUnloadedSegments: 'include_unloaded_segments', fullId: 'full_id', activeOnly: 'active_only', ignoreUnavailable: 'ignore_unavailable', nodeId: 'node_id', parentTask: 'parent_task', allowNoMatch: 'allow_no_match', allowNoDatafeeds: 'allow_no_datafeeds', allowNoJobs: 'allow_no_jobs' }
const acceptedQuerystring = ['format', 'local', 'h', 'help', 's', 'v', 'expand_wildcards', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'bytes', 'master_timeout', 'fields', 'time', 'ts', 'health', 'pri', 'include_unloaded_segments', 'full_id', 'include_bootstrap', 'active_only', 'detailed', 'index', 'ignore_unavailable', 'nodes', 'actions', 'parent_task_id', 'allow_no_match', 'allow_no_datafeeds', 'allow_no_jobs', 'from', 'size']
const snakeCase = { expandWildcards: 'expand_wildcards', errorTrace: 'error_trace', filterPath: 'filter_path', masterTimeout: 'master_timeout', includeUnloadedSegments: 'include_unloaded_segments', fullId: 'full_id', includeBootstrap: 'include_bootstrap', activeOnly: 'active_only', ignoreUnavailable: 'ignore_unavailable', parentTaskId: 'parent_task_id', allowNoMatch: 'allow_no_match', allowNoDatafeeds: 'allow_no_datafeeds', allowNoJobs: 'allow_no_jobs' }
function CatApi (transport, ConfigurationError) {
this.transport = transport


@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['allow_no_match', 'allow_no_jobs', 'force', 'timeout', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'requests_per_second', 'allow_no_forecasts', 'wait_for_completion', 'lines_to_sample', 'line_merge_size_limit', 'charset', 'format', 'has_header_row', 'column_names', 'delimiter', 'quote', 'should_trim_fields', 'grok_pattern', 'timestamp_field', 'timestamp_format', 'explain', 'calc_interim', 'start', 'end', 'advance_time', 'skip_time', 'duration', 'expires_in', 'max_model_memory', 'expand', 'exclude_interim', 'from', 'size', 'anomaly_score', 'sort', 'desc', 'job_id', 'partition_field_value', 'exclude_generated', 'verbose', 'allow_no_datafeeds', 'influencer_score', 'top_n', 'bucket_span', 'overall_score', 'record_score', 'include', 'include_model_definition', 'decompress_definition', 'tags', 'reset_start', 'reset_end', 'ignore_unavailable', 'allow_no_indices', 'ignore_throttled', 'expand_wildcards', 'delete_intervening_results', 'enabled']
const snakeCase = { allowNoMatch: 'allow_no_match', allowNoJobs: 'allow_no_jobs', errorTrace: 'error_trace', filterPath: 'filter_path', requestsPerSecond: 'requests_per_second', allowNoForecasts: 'allow_no_forecasts', waitForCompletion: 'wait_for_completion', linesToSample: 'lines_to_sample', lineMergeSizeLimit: 'line_merge_size_limit', hasHeaderRow: 'has_header_row', columnNames: 'column_names', shouldTrimFields: 'should_trim_fields', grokPattern: 'grok_pattern', timestampField: 'timestamp_field', timestampFormat: 'timestamp_format', calcInterim: 'calc_interim', advanceTime: 'advance_time', skipTime: 'skip_time', expiresIn: 'expires_in', maxModelMemory: 'max_model_memory', excludeInterim: 'exclude_interim', anomalyScore: 'anomaly_score', jobId: 'job_id', partitionFieldValue: 'partition_field_value', excludeGenerated: 'exclude_generated', allowNoDatafeeds: 'allow_no_datafeeds', influencerScore: 'influencer_score', topN: 'top_n', bucketSpan: 'bucket_span', overallScore: 'overall_score', recordScore: 'record_score', includeModelDefinition: 'include_model_definition', decompressDefinition: 'decompress_definition', resetStart: 'reset_start', resetEnd: 'reset_end', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', ignoreThrottled: 'ignore_throttled', expandWildcards: 'expand_wildcards', deleteInterveningResults: 'delete_intervening_results' }
const acceptedQuerystring = ['allow_no_match', 'allow_no_jobs', 'force', 'timeout', 'pretty', 'human', 'error_trace', 'source', 'filter_path', 'requests_per_second', 'allow_no_forecasts', 'wait_for_completion', 'calc_interim', 'start', 'end', 'advance_time', 'skip_time', 'duration', 'expires_in', 'max_model_memory', 'expand', 'exclude_interim', 'from', 'size', 'anomaly_score', 'sort', 'desc', 'job_id', 'partition_field_value', 'exclude_generated', 'verbose', 'allow_no_datafeeds', 'influencer_score', 'top_n', 'bucket_span', 'overall_score', 'record_score', 'include', 'include_model_definition', 'decompress_definition', 'tags', 'reset_start', 'reset_end', 'ignore_unavailable', 'allow_no_indices', 'ignore_throttled', 'expand_wildcards', 'delete_intervening_results', 'enabled']
const snakeCase = { allowNoMatch: 'allow_no_match', allowNoJobs: 'allow_no_jobs', errorTrace: 'error_trace', filterPath: 'filter_path', requestsPerSecond: 'requests_per_second', allowNoForecasts: 'allow_no_forecasts', waitForCompletion: 'wait_for_completion', calcInterim: 'calc_interim', advanceTime: 'advance_time', skipTime: 'skip_time', expiresIn: 'expires_in', maxModelMemory: 'max_model_memory', excludeInterim: 'exclude_interim', anomalyScore: 'anomaly_score', jobId: 'job_id', partitionFieldValue: 'partition_field_value', excludeGenerated: 'exclude_generated', allowNoDatafeeds: 'allow_no_datafeeds', influencerScore: 'influencer_score', topN: 'top_n', bucketSpan: 'bucket_span', overallScore: 'overall_score', recordScore: 'record_score', includeModelDefinition: 'include_model_definition', decompressDefinition: 'decompress_definition', resetStart: 'reset_start', resetEnd: 'reset_end', ignoreUnavailable: 'ignore_unavailable', allowNoIndices: 'allow_no_indices', ignoreThrottled: 'ignore_throttled', expandWildcards: 'expand_wildcards', deleteInterveningResults: 'delete_intervening_results' }
function MlApi (transport, ConfigurationError) {
this.transport = transport
@ -475,33 +475,6 @@ MlApi.prototype.explainDataFrameAnalytics = function mlExplainDataFrameAnalytics
return this.transport.request(request, options, callback)
}
MlApi.prototype.findFileStructure = function mlFindFileStructureApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters
if (params['body'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: body')
return handleError(err, callback)
}
var { method, body, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'POST'
path = '/' + '_ml' + '/' + 'find_file_structure'
// build request object
const request = {
method,
path,
bulkBody: body,
querystring
}
return this.transport.request(request, options, callback)
}
MlApi.prototype.flushJob = function mlFlushJobApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
@ -1832,7 +1805,6 @@ Object.defineProperties(MlApi.prototype, {
estimate_model_memory: { get () { return this.estimateModelMemory } },
evaluate_data_frame: { get () { return this.evaluateDataFrame } },
explain_data_frame_analytics: { get () { return this.explainDataFrameAnalytics } },
find_file_structure: { get () { return this.findFileStructure } },
flush_job: { get () { return this.flushJob } },
get_buckets: { get () { return this.getBuckets } },
get_calendar_events: { get () { return this.getCalendarEvents } },


@ -176,17 +176,27 @@ RollupApi.prototype.rollup = function rollupRollupApi (params, options, callback
const err = new this[kConfigurationError]('Missing required parameter: index')
return handleError(err, callback)
}
if (params['rollup_index'] == null && params['rollupIndex'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: rollup_index or rollupIndex')
return handleError(err, callback)
}
if (params['body'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: body')
return handleError(err, callback)
}
var { method, body, index, ...querystring } = params
// check required url components
if ((params['rollup_index'] != null || params['rollupIndex'] != null) && (params['index'] == null)) {
const err = new this[kConfigurationError]('Missing required parameter of the url: index')
return handleError(err, callback)
}
var { method, body, index, rollupIndex, rollup_index, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'POST'
path = '/' + encodeURIComponent(index) + '/' + '_rollup'
path = '/' + encodeURIComponent(index) + '/' + '_rollup' + '/' + encodeURIComponent(rollup_index || rollupIndex)
// build request object
const request = {


@ -23,8 +23,8 @@
/* eslint no-unused-vars: 0 */
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['analyzer', 'analyze_wildcard', 'ccs_minimize_roundtrips', 'default_operator', 'df', 'explain', 'stored_fields', 'docvalue_fields', 'from', 'ignore_unavailable', 'ignore_throttled', 'allow_no_indices', 'expand_wildcards', 'lenient', 'preference', 'q', 'routing', 'scroll', 'search_type', 'size', 'sort', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'terminate_after', 'stats', 'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text', 'timeout', 'track_scores', 'track_total_hits', 'allow_partial_search_results', 'typed_keys', 'version', 'seq_no_primary_term', 'request_cache', 'batched_reduce_size', 'max_concurrent_shard_requests', 'pre_filter_shard_size', 'rest_total_hits_as_int', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', ccsMinimizeRoundtrips: 'ccs_minimize_roundtrips', defaultOperator: 'default_operator', storedFields: 'stored_fields', docvalueFields: 'docvalue_fields', ignoreUnavailable: 'ignore_unavailable', ignoreThrottled: 'ignore_throttled', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', terminateAfter: 'terminate_after', suggestField: 'suggest_field', suggestMode: 'suggest_mode', suggestSize: 'suggest_size', suggestText: 'suggest_text', trackScores: 'track_scores', trackTotalHits: 'track_total_hits', allowPartialSearchResults: 'allow_partial_search_results', typedKeys: 'typed_keys', seqNoPrimaryTerm: 'seq_no_primary_term', requestCache: 'request_cache', batchedReduceSize: 'batched_reduce_size', maxConcurrentShardRequests: 'max_concurrent_shard_requests', preFilterShardSize: 'pre_filter_shard_size', restTotalHitsAsInt: 'rest_total_hits_as_int', errorTrace: 'error_trace', filterPath: 'filter_path' }
const acceptedQuerystring = ['analyzer', 'analyze_wildcard', 'ccs_minimize_roundtrips', 'default_operator', 'df', 'explain', 'stored_fields', 'docvalue_fields', 'from', 'ignore_unavailable', 'ignore_throttled', 'allow_no_indices', 'expand_wildcards', 'lenient', 'preference', 'q', 'routing', 'scroll', 'search_type', 'size', 'sort', '_source', '_source_excludes', '_source_exclude', '_source_includes', '_source_include', 'terminate_after', 'stats', 'suggest_field', 'suggest_mode', 'suggest_size', 'suggest_text', 'timeout', 'track_scores', 'track_total_hits', 'allow_partial_search_results', 'typed_keys', 'version', 'seq_no_primary_term', 'request_cache', 'batched_reduce_size', 'max_concurrent_shard_requests', 'pre_filter_shard_size', 'rest_total_hits_as_int', 'min_compatible_shard_node', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { analyzeWildcard: 'analyze_wildcard', ccsMinimizeRoundtrips: 'ccs_minimize_roundtrips', defaultOperator: 'default_operator', storedFields: 'stored_fields', docvalueFields: 'docvalue_fields', ignoreUnavailable: 'ignore_unavailable', ignoreThrottled: 'ignore_throttled', allowNoIndices: 'allow_no_indices', expandWildcards: 'expand_wildcards', searchType: 'search_type', _sourceExcludes: '_source_excludes', _sourceExclude: '_source_exclude', _sourceIncludes: '_source_includes', _sourceInclude: '_source_include', terminateAfter: 'terminate_after', suggestField: 'suggest_field', suggestMode: 'suggest_mode', suggestSize: 'suggest_size', suggestText: 'suggest_text', trackScores: 'track_scores', trackTotalHits: 'track_total_hits', allowPartialSearchResults: 'allow_partial_search_results', typedKeys: 'typed_keys', seqNoPrimaryTerm: 'seq_no_primary_term', requestCache: 'request_cache', batchedReduceSize: 'batched_reduce_size', maxConcurrentShardRequests: 'max_concurrent_shard_requests', preFilterShardSize: 'pre_filter_shard_size', restTotalHitsAsInt: 'rest_total_hits_as_int', minCompatibleShardNode: 'min_compatible_shard_node', errorTrace: 'error_trace', filterPath: 'filter_path' }
function searchApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)

api/api/text_structure.js (new file, 65 lines added)

@ -0,0 +1,65 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
const acceptedQuerystring = ['lines_to_sample', 'line_merge_size_limit', 'timeout', 'charset', 'format', 'has_header_row', 'column_names', 'delimiter', 'quote', 'should_trim_fields', 'grok_pattern', 'timestamp_field', 'timestamp_format', 'explain', 'pretty', 'human', 'error_trace', 'source', 'filter_path']
const snakeCase = { linesToSample: 'lines_to_sample', lineMergeSizeLimit: 'line_merge_size_limit', hasHeaderRow: 'has_header_row', columnNames: 'column_names', shouldTrimFields: 'should_trim_fields', grokPattern: 'grok_pattern', timestampField: 'timestamp_field', timestampFormat: 'timestamp_format', errorTrace: 'error_trace', filterPath: 'filter_path' }
function TextStructureApi (transport, ConfigurationError) {
this.transport = transport
this[kConfigurationError] = ConfigurationError
}
TextStructureApi.prototype.findStructure = function textStructureFindStructureApi (params, options, callback) {
;[params, options, callback] = normalizeArguments(params, options, callback)
// check required parameters
if (params['body'] == null) {
const err = new this[kConfigurationError]('Missing required parameter: body')
return handleError(err, callback)
}
var { method, body, ...querystring } = params
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
var path = ''
if (method == null) method = 'POST'
path = '/' + '_text_structure' + '/' + 'find_structure'
// build request object
const request = {
method,
path,
body: body || '',
querystring
}
return this.transport.request(request, options, callback)
}
Object.defineProperties(TextStructureApi.prototype, {
find_structure: { get () { return this.findStructure } }
})
module.exports = TextStructureApi
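For orientation, here is a hedged usage sketch of the new namespace: the node URL and the NDJSON sample are invented for illustration, and the client also exposes the method under the snake_case aliases text_structure / find_structure defined above.

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // assumed local cluster

// Two invented NDJSON lines to analyze; camelCase keys (linesToSample) are also
// accepted at runtime and rewritten to lines_to_sample by snakeCaseKeys before sending.
const sample = ['{"name":"foo","value":1}', '{"name":"bar","value":2}'].join('\n')

async function run () {
  // Sends POST /_text_structure/find_structure?lines_to_sample=2
  const { body } = await client.textStructure.findStructure({
    lines_to_sample: 2,
    body: sample
  })
  console.log(body)
}

run().catch(console.error)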


@ -84,6 +84,7 @@ const SecurityApi = require('./api/security')
const SlmApi = require('./api/slm')
const SqlApi = require('./api/sql')
const SslApi = require('./api/ssl')
const TextStructureApi = require('./api/text_structure')
const TransformApi = require('./api/transform')
const WatcherApi = require('./api/watcher')
const XpackApi = require('./api/xpack')
@ -114,6 +115,7 @@ const kSecurity = Symbol('Security')
const kSlm = Symbol('Slm')
const kSql = Symbol('Sql')
const kSsl = Symbol('Ssl')
const kTextStructure = Symbol('TextStructure')
const kTransform = Symbol('Transform')
const kWatcher = Symbol('Watcher')
const kXpack = Symbol('Xpack')
@ -145,6 +147,7 @@ function ESAPI (opts) {
this[kSlm] = null
this[kSql] = null
this[kSsl] = null
this[kTextStructure] = null
this[kTransform] = null
this[kWatcher] = null
this[kXpack] = null
@ -417,6 +420,15 @@ Object.defineProperties(ESAPI.prototype, {
return this[kSsl]
}
},
textStructure: {
get () {
if (this[kTextStructure] === null) {
this[kTextStructure] = new TextStructureApi(this.transport, this[kConfigurationError])
}
return this[kTextStructure]
}
},
text_structure: { get () { return this.textStructure } },
transform: {
get () {
if (this[kTransform] === null) {

api/kibana.d.ts (vendored, 4 changed lines)

@ -288,7 +288,6 @@ interface KibanaClient {
estimateModelMemory<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlEstimateModelMemory<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
evaluateDataFrame<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlEvaluateDataFrame<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
explainDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlExplainDataFrameAnalytics<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
findFileStructure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params?: RequestParams.MlFindFileStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
flushJob<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlFlushJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
forecast<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.MlForecast, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getBuckets<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlGetBuckets<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
@ -445,6 +444,9 @@ interface KibanaClient {
list<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TasksList, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
termvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.Termvectors<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
textStructure: {
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.TextStructureFindStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
}
transform: {
deleteTransform<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TransformDeleteTransform, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
getTransform<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TransformGetTransform, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>


@ -297,6 +297,7 @@ export interface CatPlugins extends Generic {
master_timeout?: string;
h?: string | string[];
help?: boolean;
include_bootstrap?: boolean;
s?: string | string[];
v?: boolean;
}
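As a small hedged example of the new flag (node URL assumed), a cat.plugins request that opts into bootstrap plugins:

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // assumed local cluster

// include_bootstrap asks for bootstrap plugins as well; includeBootstrap also works at runtime
client.cat.plugins({ include_bootstrap: true, format: 'json' })
  .then(({ body }) => console.log(body))
  .catch(console.error)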
@ -360,10 +361,10 @@ export interface CatSnapshots extends Generic {
export interface CatTasks extends Generic {
format?: string;
node_id?: string | string[];
nodes?: string | string[];
actions?: string | string[];
detailed?: boolean;
parent_task?: number;
parent_task_id?: string;
h?: string | string[];
help?: boolean;
s?: string | string[];
@ -1497,24 +1498,6 @@ export interface MlExplainDataFrameAnalytics<T = RequestBody> extends Generic {
body?: T;
}
export interface MlFindFileStructure<T = RequestNDBody> extends Generic {
lines_to_sample?: number;
line_merge_size_limit?: number;
timeout?: string;
charset?: string;
format?: 'ndjson' | 'xml' | 'delimited' | 'semi_structured_text';
has_header_row?: boolean;
column_names?: string | string[];
delimiter?: string;
quote?: string;
should_trim_fields?: boolean;
grok_pattern?: string;
timestamp_field?: string;
timestamp_format?: string;
explain?: boolean;
body: T;
}
export interface MlFlushJob<T = RequestBody> extends Generic {
job_id: string;
calc_interim?: boolean;
@ -2004,6 +1987,7 @@ export interface RollupPutJob<T = RequestBody> extends Generic {
export interface RollupRollup<T = RequestBody> extends Generic {
index: string;
rollup_index: string;
body: T;
}
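rollup_index is now a required, validated parameter that is appended to the request path. A hedged sketch of the updated call, with invented index names and an elided rollup configuration:

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // assumed local cluster

// Sends POST /logs-2021/_rollup/rollup-logs-2021 with the rollup config as the body
client.rollup.rollup({
  index: 'logs-2021',               // invented source index
  rollup_index: 'rollup-logs-2021', // invented target rollup index (rollupIndex also works at runtime)
  body: { /* rollup configuration goes here */ }
})
  .then(({ body }) => console.log(body))
  .catch(console.error)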
@ -2082,6 +2066,7 @@ export interface Search<T = RequestBody> extends Generic {
max_concurrent_shard_requests?: number;
pre_filter_shard_size?: number;
rest_total_hits_as_int?: boolean;
min_compatible_shard_node?: string;
body?: T;
}
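And a brief hedged example of the new min_compatible_shard_node option (index name and version string are placeholders):

import { Client } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // assumed local cluster

client.search({
  index: 'my-index',                   // placeholder index
  min_compatible_shard_node: '7.11.0', // minimum compatible version required of all shards in the search
  body: { query: { match_all: {} } }
})
  .then(({ body }) => console.log(body.hits))
  .catch(console.error)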
@ -2436,6 +2421,24 @@ export interface Termvectors<T = RequestBody> extends Generic {
body?: T;
}
export interface TextStructureFindStructure<T = RequestBody> extends Generic {
lines_to_sample?: number;
line_merge_size_limit?: number;
timeout?: string;
charset?: string;
format?: 'ndjson' | 'xml' | 'delimited' | 'semi_structured_text';
has_header_row?: boolean;
column_names?: string | string[];
delimiter?: string;
quote?: string;
should_trim_fields?: boolean;
grok_pattern?: string;
timestamp_field?: string;
timestamp_format?: string;
explain?: boolean;
body: T;
}
export interface TransformDeleteTransform extends Generic {
transform_id: string;
force?: boolean;


@ -354,7 +354,7 @@ _Default:_ `5`
[discrete]
=== autoscaling.deleteAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.deleteAutoscalingPolicy({
@ -371,7 +371,7 @@ link:{ref}/autoscaling-delete-autoscaling-policy.html[Documentation] +
[discrete]
=== autoscaling.getAutoscalingCapacity
*Stability:* experimental
[source,ts]
----
client.autoscaling.getAutoscalingCapacity()
@ -381,7 +381,7 @@ link:{ref}/autoscaling-get-autoscaling-capacity.html[Documentation] +
[discrete]
=== autoscaling.getAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.getAutoscalingPolicy({
@ -398,7 +398,7 @@ link:{ref}/autoscaling-get-autoscaling-policy.html[Documentation] +
[discrete]
=== autoscaling.putAutoscalingPolicy
*Stability:* experimental
[source,ts]
----
client.autoscaling.putAutoscalingPolicy({
@ -1182,6 +1182,7 @@ client.cat.plugins({
master_timeout: string,
h: string | string[],
help: boolean,
include_bootstrap: boolean,
s: string | string[],
v: boolean
})
@ -1204,6 +1205,9 @@ link:{ref}/cat-plugins.html[Documentation] +
|`help`
|`boolean` - Return help information
|`include_bootstrap` or `includeBootstrap`
|`boolean` - Include bootstrap plugins in the response
|`s`
|`string \| string[]` - Comma-separated list of column names or column aliases to sort by
@ -1447,15 +1451,15 @@ link:{ref}/cat-snapshots.html[Documentation] +
[discrete]
=== cat.tasks
*Stability:* experimental
[source,ts]
----
client.cat.tasks({
format: string,
node_id: string | string[],
nodes: string | string[],
actions: string | string[],
detailed: boolean,
parent_task: number,
parent_task_id: string,
h: string | string[],
help: boolean,
s: string | string[],
@ -1469,7 +1473,7 @@ link:{ref}/tasks.html[Documentation] +
|`format`
|`string` - a short version of the Accept header, e.g. json, yaml
|`node_id` or `nodeId`
|`nodes`
|`string \| string[]` - A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes
|`actions`
@ -1478,8 +1482,8 @@ link:{ref}/tasks.html[Documentation] +
|`detailed`
|`boolean` - Return detailed task information (default: false)
|`parent_task` or `parentTask`
|`number` - Return tasks with specified parent task id. Set to -1 to return all.
|`parent_task_id` or `parentTaskId`
|`string` - Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all.
|`h`
|`string \| string[]` - Comma-separated list of column names to display
@ -2925,7 +2929,7 @@ link:{ref}/enrich-stats-api.html[Documentation] +
[discrete]
=== eql.delete
*Stability:* beta
[source,ts]
----
client.eql.delete({
@ -2942,7 +2946,7 @@ link:{ref}/eql-search-api.html[Documentation] +
[discrete]
=== eql.get
*Stability:* beta
[source,ts]
----
client.eql.get({
@ -2968,7 +2972,7 @@ _Default:_ `5d`
[discrete]
=== eql.search
*Stability:* beta
[source,ts]
----
client.eql.search({
@ -5309,10 +5313,10 @@ link:{ref}/indices-stats.html[Documentation] +
|`string \| string[]` - A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices
|`completion_fields` or `completionFields`
|`string \| string[]` - A comma-separated list of fields for `fielddata` and `suggest` index metric (supports wildcards)
|`string \| string[]` - A comma-separated list of fields for the `completion` index metric (supports wildcards)
|`fielddata_fields` or `fielddataFields`
|`string \| string[]` - A comma-separated list of fields for `fielddata` index metric (supports wildcards)
|`string \| string[]` - A comma-separated list of fields for the `fielddata` index metric (supports wildcards)
|`fields`
|`string \| string[]` - A comma-separated list of fields for `fielddata` and `completion` index metric (supports wildcards)
@ -6127,82 +6131,6 @@ link:{ref}/explain-dfanalytics.html[Documentation] +
|===
[discrete]
=== ml.findFileStructure
*Stability:* experimental
[source,ts]
----
client.ml.findFileStructure({
lines_to_sample: number,
line_merge_size_limit: number,
timeout: string,
charset: string,
format: 'ndjson' | 'xml' | 'delimited' | 'semi_structured_text',
has_header_row: boolean,
column_names: string | string[],
delimiter: string,
quote: string,
should_trim_fields: boolean,
grok_pattern: string,
timestamp_field: string,
timestamp_format: string,
explain: boolean,
body: object
})
----
link:{ref}/ml-find-file-structure.html[Documentation] +
[cols=2*]
|===
|`lines_to_sample` or `linesToSample`
|`number` - How many lines of the file should be included in the analysis +
_Default:_ `1000`
|`line_merge_size_limit` or `lineMergeSizeLimit`
|`number` - Maximum number of characters permitted in a single message when lines are merged to create messages. +
_Default:_ `10000`
|`timeout`
|`string` - Timeout after which the analysis will be aborted +
_Default:_ `25s`
|`charset`
|`string` - Optional parameter to specify the character set of the file
|`format`
|`'ndjson' \| 'xml' \| 'delimited' \| 'semi_structured_text'` - Optional parameter to specify the high level file format
|`has_header_row` or `hasHeaderRow`
|`boolean` - Optional parameter to specify whether a delimited file includes the column names in its first row
|`column_names` or `columnNames`
|`string \| string[]` - Optional parameter containing a comma separated list of the column names for a delimited file
|`delimiter`
|`string` - Optional parameter to specify the delimiter character for a delimited file - must be a single character
|`quote`
|`string` - Optional parameter to specify the quote character for a delimited file - must be a single character
|`should_trim_fields` or `shouldTrimFields`
|`boolean` - Optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them
|`grok_pattern` or `grokPattern`
|`string` - Optional parameter to specify the Grok pattern that should be used to extract fields from messages in a semi-structured text file
|`timestamp_field` or `timestampField`
|`string` - Optional parameter to specify the timestamp field in the file
|`timestamp_format` or `timestampFormat`
|`string` - Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format
|`explain`
|`boolean` - Whether to include a commentary on how the structure was derived
|`body`
|`object` - The contents of the file to be analyzed
|===
[discrete]
=== ml.flushJob
@ -7897,10 +7825,10 @@ link:{ref}/cluster-nodes-stats.html[Documentation] +
|`string \| string[]` - Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) metric isn't specified.
|`completion_fields` or `completionFields`
|`string \| string[]` - A comma-separated list of fields for `fielddata` and `suggest` index metric (supports wildcards)
|`string \| string[]` - A comma-separated list of fields for the `completion` index metric (supports wildcards)
|`fielddata_fields` or `fielddataFields`
|`string \| string[]` - A comma-separated list of fields for `fielddata` index metric (supports wildcards)
|`string \| string[]` - A comma-separated list of fields for the `fielddata` index metric (supports wildcards)
|`fields`
|`string \| string[]` - A comma-separated list of fields for `fielddata` and `completion` index metric (supports wildcards)
@ -8259,6 +8187,7 @@ link:{ref}/rollup-put-job.html[Documentation] +
----
client.rollup.rollup({
index: string,
rollup_index: string,
body: object
})
----
@ -8268,6 +8197,9 @@ link:{ref}/rollup-api.html[Documentation] +
|`index`
|`string` - The index to roll up
|`rollup_index` or `rollupIndex`
|`string` - The name of the rollup index to create
|`body`
|`object` - The rollup configuration
@ -8448,6 +8380,7 @@ client.search({
max_concurrent_shard_requests: number,
pre_filter_shard_size: number,
rest_total_hits_as_int: boolean,
min_compatible_shard_node: string,
body: object
})
----
@ -8591,6 +8524,9 @@ _Default:_ `5`
|`rest_total_hits_as_int` or `restTotalHitsAsInt`
|`boolean` - Indicates whether hits.total should be rendered as an integer or an object in the rest search response
|`min_compatible_shard_node` or `minCompatibleShardNode`
|`string` - The minimum compatible version that all shards involved in search should have for this request to be successful
|`body`
|`object` - The search definition using the Query DSL
@ -10045,6 +9981,82 @@ _Default:_ `true`
|===
[discrete]
=== textStructure.findStructure
*Stability:* experimental
[source,ts]
----
client.textStructure.findStructure({
lines_to_sample: number,
line_merge_size_limit: number,
timeout: string,
charset: string,
format: 'ndjson' | 'xml' | 'delimited' | 'semi_structured_text',
has_header_row: boolean,
column_names: string | string[],
delimiter: string,
quote: string,
should_trim_fields: boolean,
grok_pattern: string,
timestamp_field: string,
timestamp_format: string,
explain: boolean,
body: object
})
----
link:{ref}/find-structure.html[Documentation] +
[cols=2*]
|===
|`lines_to_sample` or `linesToSample`
|`number` - How many lines of the file should be included in the analysis +
_Default:_ `1000`
|`line_merge_size_limit` or `lineMergeSizeLimit`
|`number` - Maximum number of characters permitted in a single message when lines are merged to create messages. +
_Default:_ `10000`
|`timeout`
|`string` - Timeout after which the analysis will be aborted +
_Default:_ `25s`
|`charset`
|`string` - Optional parameter to specify the character set of the file
|`format`
|`'ndjson' \| 'xml' \| 'delimited' \| 'semi_structured_text'` - Optional parameter to specify the high level file format
|`has_header_row` or `hasHeaderRow`
|`boolean` - Optional parameter to specify whether a delimited file includes the column names in its first row
|`column_names` or `columnNames`
|`string \| string[]` - Optional parameter containing a comma separated list of the column names for a delimited file
|`delimiter`
|`string` - Optional parameter to specify the delimiter character for a delimited file - must be a single character
|`quote`
|`string` - Optional parameter to specify the quote character for a delimited file - must be a single character
|`should_trim_fields` or `shouldTrimFields`
|`boolean` - Optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them
|`grok_pattern` or `grokPattern`
|`string` - Optional parameter to specify the Grok pattern that should be used to extract fields from messages in a semi-structured text file
|`timestamp_field` or `timestampField`
|`string` - Optional parameter to specify the timestamp field in the file
|`timestamp_format` or `timestampFormat`
|`string` - Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format
|`explain`
|`boolean` - Whether to include a commentary on how the structure was derived
|`body`
|`object` - The contents of the file to be analyzed
|===
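The former ml.findFileStructure helper is removed in this commit. As an illustrative migration sketch (the `lines` variable below stands in for whatever text the caller wants analyzed), the equivalent call now lives under the textStructure namespace:

[source,ts]
----
// before (removed in this commit):
// client.ml.findFileStructure({ body: lines })

// after:
client.textStructure.findStructure({
  lines_to_sample: 1000,
  body: lines // the text to analyze (for example NDJSON lines)
})
----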
[discrete]
=== transform.deleteTransform

index.d.ts (vendored, 28 changed lines)

@ -1397,14 +1397,6 @@ declare class Client {
explainDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
explainDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.MlExplainDataFrameAnalytics<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
explainDataFrameAnalytics<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.MlExplainDataFrameAnalytics<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
find_file_structure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params?: RequestParams.MlFindFileStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
find_file_structure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
find_file_structure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params: RequestParams.MlFindFileStructure<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
find_file_structure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params: RequestParams.MlFindFileStructure<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findFileStructure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params?: RequestParams.MlFindFileStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
findFileStructure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findFileStructure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params: RequestParams.MlFindFileStructure<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findFileStructure<TResponse = Record<string, any>, TRequestBody extends RequestNDBody = Record<string, any>[], TContext = Context>(params: RequestParams.MlFindFileStructure<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
flush_job<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.MlFlushJob<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
flush_job<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
flush_job<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.MlFlushJob<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
@ -2396,6 +2388,26 @@ declare class Client {
termvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
termvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Termvectors<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
termvectors<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.Termvectors<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
text_structure: {
find_structure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.TextStructureFindStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
find_structure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
find_structure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.TextStructureFindStructure<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
find_structure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.TextStructureFindStructure<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.TextStructureFindStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.TextStructureFindStructure<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.TextStructureFindStructure<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
}
textStructure: {
find_structure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.TextStructureFindStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
find_structure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
find_structure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.TextStructureFindStructure<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
find_structure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.TextStructureFindStructure<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params?: RequestParams.TextStructureFindStructure<TRequestBody>, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.TextStructureFindStructure<TRequestBody>, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
findStructure<TResponse = Record<string, any>, TRequestBody extends RequestBody = Record<string, any>, TContext = Context>(params: RequestParams.TextStructureFindStructure<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>): TransportRequestCallback
}
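For TypeScript users, a hedged sketch of how these overloads are consumed; the node URL is assumed and FindStructureResponse models only a fragment of the real response shape:

import { Client, RequestParams, ApiResponse } from '@elastic/elasticsearch'

const client = new Client({ node: 'http://localhost:9200' }) // assumed local cluster

interface FindStructureResponse { num_lines_analyzed: number } // assumed partial response shape

const params: RequestParams.TextStructureFindStructure<string> = {
  body: '{"message":"hello"}\n{"message":"world"}' // raw NDJSON text as the request body
}

client.textStructure
  .findStructure<FindStructureResponse, string>(params)
  .then((res: ApiResponse<FindStructureResponse>) => console.log(res.body.num_lines_analyzed))
  .catch(console.error)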
transform: {
delete_transform<TResponse = Record<string, any>, TContext = Context>(params?: RequestParams.TransformDeleteTransform, options?: TransportRequestOptions): TransportRequestPromise<ApiResponse<TResponse, TContext>>
delete_transform<TResponse = Record<string, any>, TContext = Context>(callback: callbackFn<TResponse, TContext>): TransportRequestCallback