This commit is contained in:
delvedor
2019-07-04 14:43:15 +02:00
35 changed files with 1107 additions and 89 deletions

View File

@ -1,5 +1,6 @@
---
ELASTICSEARCH_VERSION:
- 7.2.0
- 7.1.0
- 7.0.0

View File

@ -39,6 +39,7 @@ function buildCatIndices (opts) {
* @param {boolean} pri - Set to true to return stats only for primary shards
* @param {list} s - Comma-separated list of column names or column aliases to sort by
* @param {boolean} v - Verbose mode. Display column headers
* @param {boolean} include_unloaded_segments - If set to true segment stats will include stats for segments that are not currently loaded into memory
*/
const acceptedQuerystring = [
@ -52,6 +53,7 @@ function buildCatIndices (opts) {
'pri',
's',
'v',
'include_unloaded_segments',
'pretty',
'human',
'error_trace',
@ -61,6 +63,7 @@ function buildCatIndices (opts) {
const snakeCase = {
masterTimeout: 'master_timeout',
includeUnloadedSegments: 'include_unloaded_segments',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}

View File

@ -29,6 +29,7 @@ function buildClusterHealth (opts) {
* Perform a [cluster.health](http://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-health.html) request
*
* @param {list} index - Limit the information returned to a specific index
* @param {enum} expand_wildcards - Whether to expand wildcard expression to concrete indices that are open, closed or both.
* @param {enum} level - Specify the level of detail for returned information
* @param {boolean} local - Return local information, do not retrieve the state from master node (default: false)
* @param {time} master_timeout - Explicit operation timeout for connection to master node
@ -42,6 +43,7 @@ function buildClusterHealth (opts) {
*/
const acceptedQuerystring = [
'expand_wildcards',
'level',
'local',
'master_timeout',
@ -60,6 +62,7 @@ function buildClusterHealth (opts) {
]
const snakeCase = {
expandWildcards: 'expand_wildcards',
masterTimeout: 'master_timeout',
waitForActiveShards: 'wait_for_active_shards',
waitForNodes: 'wait_for_nodes',

View File

@ -32,7 +32,6 @@ function buildCreate (opts) {
* @param {string} index - The name of the index
* @param {string} type - The type of the document
* @param {string} wait_for_active_shards - Sets the number of shard copies that must be active before proceeding with the index operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)
* @param {string} parent - ID of the parent document
* @param {enum} refresh - If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes.
* @param {string} routing - Specific routing value
* @param {time} timeout - Explicit operation timeout
@ -44,7 +43,6 @@ function buildCreate (opts) {
const acceptedQuerystring = [
'wait_for_active_shards',
'parent',
'refresh',
'routing',
'timeout',

View File

@ -0,0 +1,100 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildDataFrameDeleteDataFrameTransform (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
  /**
   * Perform a [data_frame.delete_data_frame_transform](https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-data-frame-transform.html) request
   *
   * @param {string} transform_id - The id of the transform to delete
   */
  const acceptedQuerystring = []
  const snakeCase = {}
  return function dataFrameDeleteDataFrameTransform (params, options, callback) {
    // normalize arguments: `options` and/or `params` may be omitted
    if (typeof options === 'function') {
      callback = options
      options = {}
    } else {
      options = options || {}
    }
    if (params == null || typeof params === 'function') {
      callback = params
      params = {}
      options = {}
    }

    // this API requires a transform id and forbids a request body
    if (params.transform_id == null && params.transformId == null) {
      return handleError(new ConfigurationError('Missing required parameter: transform_id or transformId'), callback)
    }
    if (params.body != null) {
      return handleError(new ConfigurationError('This API does not require a body'), callback)
    }

    // headers, when provided, must be a plain object
    if (options.headers != null && typeof options.headers !== 'object') {
      return handleError(new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`), callback)
    }

    const warnings = []
    let { method, body, transformId, transform_id, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
    if (method == null) method = 'DELETE'

    // a numeric `ignore` option is normalized to an array of status codes
    if (typeof options.ignore === 'number') {
      options.ignore = [options.ignore]
    }

    const path = `/_data_frame/transforms/${encodeURIComponent(transform_id || transformId)}`

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest({ method, path, body: '', querystring }, options, callback)
  }
}
module.exports = buildDataFrameDeleteDataFrameTransform

View File

@ -0,0 +1,103 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildDataFrameGetDataFrameTransform (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
  /**
   * Perform a [data_frame.get_data_frame_transform](https://www.elastic.co/guide/en/elasticsearch/reference/current/get-data-frame-transform.html) request
   *
   * @param {string} transform_id - The id or comma delimited list of id expressions of the transforms to get, '_all' or '*' implies get all transforms
   * @param {int} from - skips a number of transform configs, defaults to 0
   * @param {int} size - specifies a max number of transforms to get, defaults to 100
   */
  const acceptedQuerystring = [
    'from',
    'size'
  ]
  const snakeCase = {}
  return function dataFrameGetDataFrameTransform (params, options, callback) {
    // normalize arguments: `options` and/or `params` may be omitted
    if (typeof options === 'function') {
      callback = options
      options = {}
    } else {
      options = options || {}
    }
    if (params == null || typeof params === 'function') {
      callback = params
      params = {}
      options = {}
    }

    // this API forbids a request body
    if (params.body != null) {
      return handleError(new ConfigurationError('This API does not require a body'), callback)
    }

    // headers, when provided, must be a plain object
    if (options.headers != null && typeof options.headers !== 'object') {
      return handleError(new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`), callback)
    }

    const warnings = []
    let { method, body, transformId, transform_id, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
    if (method == null) method = 'GET'

    // a numeric `ignore` option is normalized to an array of status codes
    if (typeof options.ignore === 'number') {
      options.ignore = [options.ignore]
    }

    // without an id, fetch the whole transform collection
    const id = transform_id || transformId
    const path = id != null
      ? `/_data_frame/transforms/${encodeURIComponent(id)}`
      : '/_data_frame/transforms'

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest({ method, path, body: null, querystring }, options, callback)
  }
}
module.exports = buildDataFrameGetDataFrameTransform

View File

@ -0,0 +1,99 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildDataFrameGetDataFrameTransformStats (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
  /**
   * Perform a [data_frame.get_data_frame_transform_stats](https://www.elastic.co/guide/en/elasticsearch/reference/current/get-data-frame-transform-stats.html) request
   *
   * @param {string} transform_id - The id of the transform for which to get stats. '_all' or '*' implies all transforms
   * @param {number} from - skips a number of transform stats, defaults to 0
   * @param {number} size - specifies a max number of transform stats to get, defaults to 100
   */
  const acceptedQuerystring = [
    'from',
    'size'
  ]
  const snakeCase = {
  }
  return function dataFrameGetDataFrameTransformStats (params, options, callback) {
    options = options || {}
    if (typeof options === 'function') {
      callback = options
      options = {}
    }
    if (typeof params === 'function' || params == null) {
      callback = params
      params = {}
      options = {}
    }
    // check required parameters
    if (params.body != null) {
      const err = new ConfigurationError('This API does not require a body')
      return handleError(err, callback)
    }
    // validate headers object
    if (options.headers != null && typeof options.headers !== 'object') {
      const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
      return handleError(err, callback)
    }
    var warnings = []
    var { method, body, transformId, transform_id, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
    if (method == null) {
      method = 'GET'
    }
    var ignore = options.ignore
    if (typeof ignore === 'number') {
      options.ignore = [ignore]
    }
    var path = ''
    // transform_id is optional: when omitted, request stats for all transforms
    // instead of building a broken '/transforms/undefined/_stats' path
    // (mirrors the optional-id handling of the get_data_frame_transform API)
    if ((transform_id || transformId) != null) {
      path = '/' + '_data_frame' + '/' + 'transforms' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_stats'
    } else {
      path = '/' + '_data_frame' + '/' + 'transforms' + '/' + '_stats'
    }
    // build request object
    const request = {
      method,
      path,
      body: null,
      querystring
    }
    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest(request, options, callback)
  }
}
module.exports = buildDataFrameGetDataFrameTransformStats

View File

@ -0,0 +1,96 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildDataFramePreviewDataFrameTransform (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
  /**
   * Perform a [data_frame.preview_data_frame_transform](https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-data-frame-transform.html) request
   *
   * @param {object} body - The definition for the data_frame transform to preview
   */
  const acceptedQuerystring = []
  const snakeCase = {}
  return function dataFramePreviewDataFrameTransform (params, options, callback) {
    // normalize arguments: `options` and/or `params` may be omitted
    if (typeof options === 'function') {
      callback = options
      options = {}
    } else {
      options = options || {}
    }
    if (params == null || typeof params === 'function') {
      callback = params
      params = {}
      options = {}
    }

    // a transform definition body is mandatory for this API
    if (params.body == null) {
      return handleError(new ConfigurationError('Missing required parameter: body'), callback)
    }

    // headers, when provided, must be a plain object
    if (options.headers != null && typeof options.headers !== 'object') {
      return handleError(new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`), callback)
    }

    const warnings = []
    let { method, body, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
    if (method == null) method = 'POST'

    // a numeric `ignore` option is normalized to an array of status codes
    if (typeof options.ignore === 'number') {
      options.ignore = [options.ignore]
    }

    const path = '/_data_frame/transforms/_preview'

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest({ method, path, body: body || '', querystring }, options, callback)
  }
}
module.exports = buildDataFramePreviewDataFrameTransform

View File

@ -0,0 +1,101 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildDataFramePutDataFrameTransform (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
  /**
   * Perform a [data_frame.put_data_frame_transform](https://www.elastic.co/guide/en/elasticsearch/reference/current/put-data-frame-transform.html) request
   *
   * @param {string} transform_id - The id of the new transform.
   * @param {object} body - The data frame transform definition
   */
  const acceptedQuerystring = []
  const snakeCase = {}
  return function dataFramePutDataFrameTransform (params, options, callback) {
    // normalize arguments: `options` and/or `params` may be omitted
    if (typeof options === 'function') {
      callback = options
      options = {}
    } else {
      options = options || {}
    }
    if (params == null || typeof params === 'function') {
      callback = params
      params = {}
      options = {}
    }

    // both a transform id and a definition body are mandatory
    if (params.transform_id == null && params.transformId == null) {
      return handleError(new ConfigurationError('Missing required parameter: transform_id or transformId'), callback)
    }
    if (params.body == null) {
      return handleError(new ConfigurationError('Missing required parameter: body'), callback)
    }

    // headers, when provided, must be a plain object
    if (options.headers != null && typeof options.headers !== 'object') {
      return handleError(new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`), callback)
    }

    const warnings = []
    let { method, body, transformId, transform_id, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
    if (method == null) method = 'PUT'

    // a numeric `ignore` option is normalized to an array of status codes
    if (typeof options.ignore === 'number') {
      options.ignore = [options.ignore]
    }

    const path = `/_data_frame/transforms/${encodeURIComponent(transform_id || transformId)}`

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest({ method, path, body: body || '', querystring }, options, callback)
  }
}
module.exports = buildDataFramePutDataFrameTransform

View File

@ -0,0 +1,101 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildDataFrameStartDataFrameTransform (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
  /**
   * Perform a [data_frame.start_data_frame_transform](https://www.elastic.co/guide/en/elasticsearch/reference/current/start-data-frame-transform.html) request
   *
   * @param {string} transform_id - The id of the transform to start
   * @param {time} timeout - Controls the time to wait for the transform to start
   */
  const acceptedQuerystring = [
    'timeout'
  ]
  const snakeCase = {}
  return function dataFrameStartDataFrameTransform (params, options, callback) {
    // normalize arguments: `options` and/or `params` may be omitted
    if (typeof options === 'function') {
      callback = options
      options = {}
    } else {
      options = options || {}
    }
    if (params == null || typeof params === 'function') {
      callback = params
      params = {}
      options = {}
    }

    // this API requires a transform id and forbids a request body
    if (params.transform_id == null && params.transformId == null) {
      return handleError(new ConfigurationError('Missing required parameter: transform_id or transformId'), callback)
    }
    if (params.body != null) {
      return handleError(new ConfigurationError('This API does not require a body'), callback)
    }

    // headers, when provided, must be a plain object
    if (options.headers != null && typeof options.headers !== 'object') {
      return handleError(new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`), callback)
    }

    const warnings = []
    let { method, body, transformId, transform_id, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
    if (method == null) method = 'POST'

    // a numeric `ignore` option is normalized to an array of status codes
    if (typeof options.ignore === 'number') {
      options.ignore = [options.ignore]
    }

    const path = `/_data_frame/transforms/${encodeURIComponent(transform_id || transformId)}/_start`

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest({ method, path, body: '', querystring }, options, callback)
  }
}
module.exports = buildDataFrameStartDataFrameTransform

View File

@ -0,0 +1,104 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildDataFrameStopDataFrameTransform (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
  /**
   * Perform a [data_frame.stop_data_frame_transform](https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-data-frame-transform.html) request
   *
   * @param {string} transform_id - The id of the transform to stop
   * @param {boolean} wait_for_completion - Whether to wait for the transform to fully stop before returning or not. Default to false
   * @param {time} timeout - Controls the time to wait until the transform has stopped. Default to 30 seconds
   */
  const acceptedQuerystring = [
    'wait_for_completion',
    'timeout'
  ]
  const snakeCase = {
    waitForCompletion: 'wait_for_completion'
  }
  return function dataFrameStopDataFrameTransform (params, options, callback) {
    // normalize arguments: `options` and/or `params` may be omitted
    if (typeof options === 'function') {
      callback = options
      options = {}
    } else {
      options = options || {}
    }
    if (params == null || typeof params === 'function') {
      callback = params
      params = {}
      options = {}
    }

    // this API requires a transform id and forbids a request body
    if (params.transform_id == null && params.transformId == null) {
      return handleError(new ConfigurationError('Missing required parameter: transform_id or transformId'), callback)
    }
    if (params.body != null) {
      return handleError(new ConfigurationError('This API does not require a body'), callback)
    }

    // headers, when provided, must be a plain object
    if (options.headers != null && typeof options.headers !== 'object') {
      return handleError(new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`), callback)
    }

    const warnings = []
    let { method, body, transformId, transform_id, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
    if (method == null) method = 'POST'

    // a numeric `ignore` option is normalized to an array of status codes
    if (typeof options.ignore === 'number') {
      options.ignore = [options.ignore]
    }

    const path = `/_data_frame/transforms/${encodeURIComponent(transform_id || transformId)}/_stop`

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest({ method, path, body: '', querystring }, options, callback)
  }
}
module.exports = buildDataFrameStopDataFrameTransform

View File

@ -32,7 +32,6 @@ function buildDelete (opts) {
* @param {string} index - The name of the index
* @param {string} type - The type of the document
* @param {string} wait_for_active_shards - Sets the number of shard copies that must be active before proceeding with the delete operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)
* @param {string} parent - ID of parent document
* @param {enum} refresh - If `true` then refresh the effected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes.
* @param {string} routing - Specific routing value
* @param {time} timeout - Explicit operation timeout
@ -44,7 +43,6 @@ function buildDelete (opts) {
const acceptedQuerystring = [
'wait_for_active_shards',
'parent',
'refresh',
'routing',
'timeout',

View File

@ -32,7 +32,6 @@ function buildExists (opts) {
* @param {string} index - The name of the index
* @param {string} type - The type of the document (use `_all` to fetch the first document matching the ID across all types)
* @param {list} stored_fields - A comma-separated list of stored fields to return in the response
* @param {string} parent - The ID of the parent document
* @param {string} preference - Specify the node or shard the operation should be performed on (default: random)
* @param {boolean} realtime - Specify whether to perform the operation in realtime or search mode
* @param {boolean} refresh - Refresh the shard containing the document before performing the operation
@ -46,7 +45,6 @@ function buildExists (opts) {
const acceptedQuerystring = [
'stored_fields',
'parent',
'preference',
'realtime',
'refresh',

View File

@ -31,7 +31,6 @@ function buildExistsSource (opts) {
* @param {string} id - The document ID
* @param {string} index - The name of the index
* @param {string} type - The type of the document; deprecated and optional starting with 7.0
* @param {string} parent - The ID of the parent document
* @param {string} preference - Specify the node or shard the operation should be performed on (default: random)
* @param {boolean} realtime - Specify whether to perform the operation in realtime or search mode
* @param {boolean} refresh - Refresh the shard containing the document before performing the operation
@ -44,7 +43,6 @@ function buildExistsSource (opts) {
*/
const acceptedQuerystring = [
'parent',
'preference',
'realtime',
'refresh',

View File

@ -37,7 +37,6 @@ function buildExplain (opts) {
* @param {string} df - The default field for query string query (default: _all)
* @param {list} stored_fields - A comma-separated list of stored fields to return in the response
* @param {boolean} lenient - Specify whether format-based query failures (such as providing text to a numeric field) should be ignored
* @param {string} parent - The ID of the parent document
* @param {string} preference - Specify the node or shard the operation should be performed on (default: random)
* @param {string} q - Query in the Lucene query string syntax
* @param {string} routing - Specific routing value
@ -54,7 +53,6 @@ function buildExplain (opts) {
'df',
'stored_fields',
'lenient',
'parent',
'preference',
'q',
'routing',

View File

@ -33,6 +33,7 @@ function buildFieldCaps (opts) {
* @param {boolean} ignore_unavailable - Whether specified concrete indices should be ignored when unavailable (missing or closed)
* @param {boolean} allow_no_indices - Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
* @param {enum} expand_wildcards - Whether to expand wildcard expression to concrete indices that are open, closed or both.
* @param {boolean} include_unmapped - Indicates whether unmapped fields should be included in the response.
*/
const acceptedQuerystring = [
@ -40,6 +41,7 @@ function buildFieldCaps (opts) {
'ignore_unavailable',
'allow_no_indices',
'expand_wildcards',
'include_unmapped',
'pretty',
'human',
'error_trace',
@ -51,6 +53,7 @@ function buildFieldCaps (opts) {
ignoreUnavailable: 'ignore_unavailable',
allowNoIndices: 'allow_no_indices',
expandWildcards: 'expand_wildcards',
includeUnmapped: 'include_unmapped',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}

View File

@ -32,7 +32,6 @@ function buildGet (opts) {
* @param {string} index - The name of the index
* @param {string} type - The type of the document (use `_all` to fetch the first document matching the ID across all types)
* @param {list} stored_fields - A comma-separated list of stored fields to return in the response
* @param {string} parent - The ID of the parent document
* @param {string} preference - Specify the node or shard the operation should be performed on (default: random)
* @param {boolean} realtime - Specify whether to perform the operation in realtime or search mode
* @param {boolean} refresh - Refresh the shard containing the document before performing the operation
@ -40,15 +39,12 @@ function buildGet (opts) {
* @param {list} _source - True or false to return the _source field or not, or a list of fields to return
* @param {list} _source_excludes - A list of fields to exclude from the returned _source field
* @param {list} _source_includes - A list of fields to extract and return from the _source field
* @param {list} _source_exclude - A list of fields to exclude from the returned _source field
* @param {list} _source_include - A list of fields to extract and return from the _source field
* @param {number} version - Explicit version number for concurrency control
* @param {enum} version_type - Specific version type
*/
const acceptedQuerystring = [
'stored_fields',
'parent',
'preference',
'realtime',
'refresh',
@ -58,8 +54,6 @@ function buildGet (opts) {
'_source_exclude',
'_source_includes',
'_source_include',
'_source_exclude',
'_source_include',
'version',
'version_type',
'pretty',

View File

@ -31,7 +31,6 @@ function buildGetSource (opts) {
* @param {string} id - The document ID
* @param {string} index - The name of the index
* @param {string} type - The type of the document; deprecated and optional starting with 7.0
* @param {string} parent - The ID of the parent document
* @param {string} preference - Specify the node or shard the operation should be performed on (default: random)
* @param {boolean} realtime - Specify whether to perform the operation in realtime or search mode
* @param {boolean} refresh - Refresh the shard containing the document before performing the operation
@ -44,7 +43,6 @@ function buildGetSource (opts) {
*/
const acceptedQuerystring = [
'parent',
'preference',
'realtime',
'refresh',

View File

@ -33,7 +33,6 @@ function buildIndex (opts) {
* @param {string} type - The type of the document
* @param {string} wait_for_active_shards - Sets the number of shard copies that must be active before proceeding with the index operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)
* @param {enum} op_type - Explicit operation type
* @param {string} parent - ID of the parent document
* @param {enum} refresh - If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes.
* @param {string} routing - Specific routing value
* @param {time} timeout - Explicit operation timeout
@ -48,7 +47,6 @@ function buildIndex (opts) {
const acceptedQuerystring = [
'wait_for_active_shards',
'op_type',
'parent',
'refresh',
'routing',
'timeout',

View File

@ -34,6 +34,7 @@ function buildIndicesClose (opts) {
* @param {boolean} ignore_unavailable - Whether specified concrete indices should be ignored when unavailable (missing or closed)
* @param {boolean} allow_no_indices - Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
* @param {enum} expand_wildcards - Whether to expand wildcard expression to concrete indices that are open, closed or both.
* @param {string} wait_for_active_shards - Sets the number of active shards to wait for before the operation returns.
*/
const acceptedQuerystring = [
@ -42,6 +43,7 @@ function buildIndicesClose (opts) {
'ignore_unavailable',
'allow_no_indices',
'expand_wildcards',
'wait_for_active_shards',
'pretty',
'human',
'error_trace',
@ -54,6 +56,7 @@ function buildIndicesClose (opts) {
ignoreUnavailable: 'ignore_unavailable',
allowNoIndices: 'allow_no_indices',
expandWildcards: 'expand_wildcards',
waitForActiveShards: 'wait_for_active_shards',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}

View File

@ -110,14 +110,14 @@ function buildIndicesPutMapping (opts) {
path = '/' + encodeURIComponent(index) + '/' + encodeURIComponent(type) + '/' + '_mappings'
} else if ((index) != null && (type) != null) {
path = '/' + encodeURIComponent(index) + '/' + '_mappings' + '/' + encodeURIComponent(type)
} else if ((type) != null) {
path = '/' + '_mapping' + '/' + encodeURIComponent(type)
} else if ((type) != null) {
path = '/' + '_mappings' + '/' + encodeURIComponent(type)
} else if ((type) != null) {
path = '/' + '_mapping' + '/' + encodeURIComponent(type)
} else if ((index) != null) {
path = '/' + encodeURIComponent(index) + '/' + '_mappings'
} else {
path = '/' + encodeURIComponent(index) + '/' + '_mapping'
} else {
path = '/' + encodeURIComponent(index) + '/' + '_mappings'
}
// build request object

View File

@ -37,6 +37,9 @@ function buildIndicesStats (opts) {
* @param {enum} level - Return stats aggregated at cluster, index or shard level
* @param {list} types - A comma-separated list of document types for the `indexing` index metric
* @param {boolean} include_segment_file_sizes - Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested)
* @param {boolean} include_unloaded_segments - If set to true segment stats will include stats for segments that are not currently loaded into memory
* @param {enum} expand_wildcards - Whether to expand wildcard expression to concrete indices that are open, closed or both.
* @param {boolean} forbid_closed_indices - If set to false stats will also be collected from closed indices if explicitly specified or if expand_wildcards expands to closed indices
*/
const acceptedQuerystring = [
@ -47,6 +50,9 @@ function buildIndicesStats (opts) {
'level',
'types',
'include_segment_file_sizes',
'include_unloaded_segments',
'expand_wildcards',
'forbid_closed_indices',
'pretty',
'human',
'error_trace',
@ -58,6 +64,9 @@ function buildIndicesStats (opts) {
completionFields: 'completion_fields',
fielddataFields: 'fielddata_fields',
includeSegmentFileSizes: 'include_segment_file_sizes',
includeUnloadedSegments: 'include_unloaded_segments',
expandWildcards: 'expand_wildcards',
forbidClosedIndices: 'forbid_closed_indices',
errorTrace: 'error_trace',
filterPath: 'filter_path'
}

View File

@ -34,7 +34,7 @@ function buildMsearch (opts) {
* @param {number} max_concurrent_searches - Controls the maximum number of concurrent searches the multi search api will execute
* @param {boolean} typed_keys - Specify whether aggregation and suggester names should be prefixed by their respective types in the response
* @param {number} pre_filter_shard_size - A threshold that enforces a pre-filter roundtrip to prefilter search shards based on query rewriting if the number of shards the search request expands to exceeds the threshold. This filter roundtrip can limit the number of shards significantly if for instance a shard can not match any documents based on it's rewrite method ie. if date filters are mandatory to match but the shard bounds and the query are disjoint.
* @param {number} max_concurrent_shard_requests - The number of concurrent shard requests each sub search executes concurrently. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests
* @param {number} max_concurrent_shard_requests - The number of concurrent shard requests each sub search executes concurrently per node. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests
* @param {boolean} rest_total_hits_as_int - Indicates whether hits.total should be rendered as an integer or an object in the rest search response
* @param {boolean} ccs_minimize_roundtrips - Indicates whether network round-trips should be minimized as part of cross-cluster search requests execution
* @param {object} body - The request definitions (metadata-search request definition pairs), separated by newlines

View File

@ -39,7 +39,6 @@ function buildMtermvectors (opts) {
* @param {boolean} payloads - Specifies if term payloads should be returned. Applies to all returned documents unless otherwise specified in body "params" or "docs".
* @param {string} preference - Specify the node or shard the operation should be performed on (default: random) .Applies to all returned documents unless otherwise specified in body "params" or "docs".
* @param {string} routing - Specific routing value. Applies to all returned documents unless otherwise specified in body "params" or "docs".
* @param {string} parent - Parent id of documents. Applies to all returned documents unless otherwise specified in body "params" or "docs".
* @param {boolean} realtime - Specifies if requests are real-time as opposed to near-real-time (default: true).
* @param {number} version - Explicit version number for concurrency control
* @param {enum} version_type - Specific version type
@ -56,7 +55,6 @@ function buildMtermvectors (opts) {
'payloads',
'preference',
'routing',
'parent',
'realtime',
'version',
'version_type',

View File

@ -100,12 +100,12 @@ function buildNodesHotThreads (opts) {
path = '/' + '_cluster' + '/' + 'nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hotthreads'
} else if ((node_id || nodeId) != null) {
path = '/' + '_cluster' + '/' + 'nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hot_threads'
} else if ((node_id || nodeId) != null) {
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hotthreads'
} else if ((node_id || nodeId) != null) {
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hot_threads'
} else if ((node_id || nodeId) != null) {
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hotthreads'
} else {
path = '/' + '_cluster' + '/' + 'nodes' + '/' + 'hotthreads'
path = '/' + '_nodes' + '/' + 'hot_threads'
}
// build request object

View File

@ -0,0 +1,96 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict'
/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */
function buildScriptsPainlessContext (opts) {
  // eslint-disable-next-line no-unused-vars
  const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts

  /**
   * Perform a [scripts_painless_context](undefined) request
   *
   * Builds and returns the `scriptsPainlessContext` API method, which issues a
   * GET request against `/_scripts/painless/_context`.
   *
   * @param {string} context - Select a specific context to retrieve API information about
   */

  // Querystring keys the API accepts (snake_case form).
  const acceptedQuerystring = [
    'context',
    'pretty',
    'human',
    'error_trace',
    'source',
    'filter_path'
  ]

  // camelCase aliases mapped to their snake_case querystring keys.
  const snakeCase = {
    errorTrace: 'error_trace',
    filterPath: 'filter_path'
  }

  return function scriptsPainlessContext (params, options, callback) {
    options = options || {}

    // Support the (params, callback) calling convention.
    if (typeof options === 'function') {
      callback = options
      options = {}
    }

    // Support the (callback) and () calling conventions.
    if (typeof params === 'function' || params == null) {
      callback = params
      params = {}
      options = {}
    }

    // Headers, when provided, must be a plain object.
    if (options.headers != null && typeof options.headers !== 'object') {
      const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
      return handleError(err, callback)
    }

    const warnings = []
    // `method` and `body` are pulled out of params; everything else is
    // treated as querystring input and normalized to snake_case.
    // eslint-disable-next-line no-unused-vars
    let { method, body, ...querystring } = params
    querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)

    if (method == null) method = 'GET'

    // A single numeric ignore value is normalized to an array.
    if (typeof options.ignore === 'number') {
      options.ignore = [options.ignore]
    }

    const request = {
      method,
      path: '/' + '_scripts' + '/' + 'painless' + '/' + '_context',
      body: null,
      querystring
    }

    options.warnings = warnings.length === 0 ? null : warnings
    return makeRequest(request, options, callback)
  }
}
module.exports = buildScriptsPainlessContext

View File

@ -30,7 +30,7 @@ function buildSecurityPutRoleMapping (opts) {
*
* @param {string} name - Role-mapping name
* @param {enum} refresh - If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes.
* @param {object} body - The role to add
* @param {object} body - The role mapping to add
*/
const acceptedQuerystring = [

View File

@ -39,7 +39,6 @@ function buildTermvectors (opts) {
* @param {boolean} payloads - Specifies if term payloads should be returned.
* @param {string} preference - Specify the node or shard the operation should be performed on (default: random).
* @param {string} routing - Specific routing value.
* @param {string} parent - Parent id of documents.
* @param {boolean} realtime - Specifies if request is real-time as opposed to near-real-time (default: true).
* @param {number} version - Explicit version number for concurrency control
* @param {enum} version_type - Specific version type
@ -55,7 +54,6 @@ function buildTermvectors (opts) {
'payloads',
'preference',
'routing',
'parent',
'realtime',
'version',
'version_type',

View File

@ -36,7 +36,6 @@ function buildUpdate (opts) {
* @param {list} _source_excludes - A list of fields to exclude from the returned _source field
* @param {list} _source_includes - A list of fields to extract and return from the _source field
* @param {string} lang - The script language (default: painless)
* @param {string} parent - ID of the parent document. It is only used for routing and for the upsert request
* @param {enum} refresh - If `true` then refresh the effected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes.
* @param {number} retry_on_conflict - Specify how many times should the operation be retried when a conflict occurs (default: 0)
* @param {string} routing - Specific routing value
@ -54,7 +53,6 @@ function buildUpdate (opts) {
'_source_includes',
'_source_include',
'lang',
'parent',
'refresh',
'retry_on_conflict',
'routing',

View File

@ -97,6 +97,38 @@ function ESAPI (opts) {
},
count: lazyLoad('count', opts),
create: lazyLoad('create', opts),
data_frame: {
delete_data_frame_transform: lazyLoad('data_frame.delete_data_frame_transform', opts),
deleteDataFrameTransform: lazyLoad('data_frame.delete_data_frame_transform', opts),
get_data_frame_transform: lazyLoad('data_frame.get_data_frame_transform', opts),
getDataFrameTransform: lazyLoad('data_frame.get_data_frame_transform', opts),
get_data_frame_transform_stats: lazyLoad('data_frame.get_data_frame_transform_stats', opts),
getDataFrameTransformStats: lazyLoad('data_frame.get_data_frame_transform_stats', opts),
preview_data_frame_transform: lazyLoad('data_frame.preview_data_frame_transform', opts),
previewDataFrameTransform: lazyLoad('data_frame.preview_data_frame_transform', opts),
put_data_frame_transform: lazyLoad('data_frame.put_data_frame_transform', opts),
putDataFrameTransform: lazyLoad('data_frame.put_data_frame_transform', opts),
start_data_frame_transform: lazyLoad('data_frame.start_data_frame_transform', opts),
startDataFrameTransform: lazyLoad('data_frame.start_data_frame_transform', opts),
stop_data_frame_transform: lazyLoad('data_frame.stop_data_frame_transform', opts),
stopDataFrameTransform: lazyLoad('data_frame.stop_data_frame_transform', opts)
},
dataFrame: {
delete_data_frame_transform: lazyLoad('data_frame.delete_data_frame_transform', opts),
deleteDataFrameTransform: lazyLoad('data_frame.delete_data_frame_transform', opts),
get_data_frame_transform: lazyLoad('data_frame.get_data_frame_transform', opts),
getDataFrameTransform: lazyLoad('data_frame.get_data_frame_transform', opts),
get_data_frame_transform_stats: lazyLoad('data_frame.get_data_frame_transform_stats', opts),
getDataFrameTransformStats: lazyLoad('data_frame.get_data_frame_transform_stats', opts),
preview_data_frame_transform: lazyLoad('data_frame.preview_data_frame_transform', opts),
previewDataFrameTransform: lazyLoad('data_frame.preview_data_frame_transform', opts),
put_data_frame_transform: lazyLoad('data_frame.put_data_frame_transform', opts),
putDataFrameTransform: lazyLoad('data_frame.put_data_frame_transform', opts),
start_data_frame_transform: lazyLoad('data_frame.start_data_frame_transform', opts),
startDataFrameTransform: lazyLoad('data_frame.start_data_frame_transform', opts),
stop_data_frame_transform: lazyLoad('data_frame.stop_data_frame_transform', opts),
stopDataFrameTransform: lazyLoad('data_frame.stop_data_frame_transform', opts)
},
delete: lazyLoad('delete', opts),
delete_by_query: lazyLoad('delete_by_query', opts),
deleteByQuery: lazyLoad('delete_by_query', opts),
@ -363,6 +395,8 @@ function ESAPI (opts) {
stop_job: lazyLoad('rollup.stop_job', opts),
stopJob: lazyLoad('rollup.stop_job', opts)
},
scripts_painless_context: lazyLoad('scripts_painless_context', opts),
scriptsPainlessContext: lazyLoad('scripts_painless_context', opts),
scripts_painless_execute: lazyLoad('scripts_painless_execute', opts),
scriptsPainlessExecute: lazyLoad('scripts_painless_execute', opts),
scroll: lazyLoad('scroll', opts),

View File

@ -117,6 +117,7 @@ export interface CatIndices extends Generic {
pri?: boolean;
s?: string | string[];
v?: boolean;
include_unloaded_segments?: boolean;
}
export interface CatMaster extends Generic {
@ -279,6 +280,7 @@ export interface ClusterGetSettings extends Generic {
export interface ClusterHealth extends Generic {
index?: string | string[];
expand_wildcards?: 'open' | 'closed' | 'none' | 'all';
level?: 'cluster' | 'indices' | 'shards';
local?: boolean;
master_timeout?: string;
@ -360,7 +362,6 @@ export interface Create<T = any> extends Generic {
index: string;
type?: string;
wait_for_active_shards?: string;
parent?: string;
refresh?: 'true' | 'false' | 'wait_for';
routing?: string;
timeout?: string;
@ -375,7 +376,6 @@ export interface Delete extends Generic {
index: string;
type?: string;
wait_for_active_shards?: string;
parent?: string;
refresh?: 'true' | 'false' | 'wait_for';
routing?: string;
timeout?: string;
@ -443,7 +443,6 @@ export interface Exists extends Generic {
_source_exclude?: string | string[];
_source_include?: string | string[];
stored_fields?: string | string[];
parent?: string;
preference?: string;
realtime?: boolean;
refresh?: boolean;
@ -461,7 +460,6 @@ export interface ExistsSource extends Generic {
type?: string;
_source_exclude?: string | string[];
_source_include?: string | string[];
parent?: string;
preference?: string;
realtime?: boolean;
refresh?: boolean;
@ -485,7 +483,6 @@ export interface Explain<T = any> extends Generic {
df?: string;
stored_fields?: string | string[];
lenient?: boolean;
parent?: string;
preference?: string;
q?: string;
routing?: string;
@ -501,6 +498,7 @@ export interface FieldCaps extends Generic {
ignore_unavailable?: boolean;
allow_no_indices?: boolean;
expand_wildcards?: 'open' | 'closed' | 'none' | 'all';
include_unmapped?: boolean;
}
export interface Get extends Generic {
@ -510,7 +508,6 @@ export interface Get extends Generic {
_source_exclude?: string | string[];
_source_include?: string | string[];
stored_fields?: string | string[];
parent?: string;
preference?: string;
realtime?: boolean;
refresh?: boolean;
@ -533,7 +530,6 @@ export interface GetSource extends Generic {
type?: string;
_source_exclude?: string | string[];
_source_include?: string | string[];
parent?: string;
preference?: string;
realtime?: boolean;
refresh?: boolean;
@ -551,7 +547,6 @@ export interface Index<T = any> extends Generic {
type?: string;
wait_for_active_shards?: string;
op_type?: 'index' | 'create';
parent?: string;
refresh?: 'true' | 'false' | 'wait_for';
routing?: string;
timeout?: string;
@ -586,6 +581,7 @@ export interface IndicesClose extends Generic {
ignore_unavailable?: boolean;
allow_no_indices?: boolean;
expand_wildcards?: 'open' | 'closed' | 'none' | 'all';
wait_for_active_shards?: string;
}
export interface IndicesCreate<T = any> extends Generic {
@ -874,6 +870,9 @@ export interface IndicesStats extends Generic {
level?: 'cluster' | 'indices' | 'shards';
types?: string | string[];
include_segment_file_sizes?: boolean;
include_unloaded_segments?: boolean;
expand_wildcards?: 'open' | 'closed' | 'none' | 'all';
forbid_closed_indices?: boolean;
}
export interface IndicesUpdateAliases<T = any> extends Generic {
@ -991,7 +990,6 @@ export interface Mtermvectors<T = any> extends Generic {
payloads?: boolean;
preference?: string;
routing?: string;
parent?: string;
realtime?: boolean;
version?: number;
version_type?: 'internal' | 'external' | 'external_gte' | 'force';
@ -1080,6 +1078,10 @@ export interface RenderSearchTemplate<T = any> extends Generic {
body?: T;
}
export interface ScriptsPainlessContext extends Generic {
context?: string;
}
export interface ScriptsPainlessExecute<T = any> extends Generic {
body?: T;
}
@ -1268,7 +1270,6 @@ export interface Termvectors<T = any> extends Generic {
payloads?: boolean;
preference?: string;
routing?: string;
parent?: string;
realtime?: boolean;
version?: number;
version_type?: 'internal' | 'external' | 'external_gte' | 'force';
@ -1286,7 +1287,6 @@ export interface Update<T = any> extends Generic {
_source_excludes?: string | string[];
_source_includes?: string | string[];
lang?: string;
parent?: string;
refresh?: 'true' | 'false' | 'wait_for';
retry_on_conflict?: number;
routing?: string;
@ -1391,6 +1391,42 @@ export interface CcrUnfollow extends Generic {
index: string;
}
export interface DataFrameDeleteDataFrameTransform extends Generic {
transform_id: string;
}
export interface DataFrameGetDataFrameTransform extends Generic {
transform_id?: string;
from?: number;
size?: number;
}
export interface DataFrameGetDataFrameTransformStats extends Generic {
transform_id?: string;
from?: number;
size?: number;
}
export interface DataFramePreviewDataFrameTransform<T = any> extends Generic {
body: T;
}
export interface DataFramePutDataFrameTransform<T = any> extends Generic {
transform_id: string;
body: T;
}
export interface DataFrameStartDataFrameTransform extends Generic {
transform_id: string;
timeout?: string;
}
export interface DataFrameStopDataFrameTransform extends Generic {
transform_id: string;
wait_for_completion?: boolean;
timeout?: string;
}
export interface GraphExplore<T = any> extends Generic {
index?: string | string[];
type?: string | string[];

View File

@ -314,6 +314,9 @@ link:{ref}/cat-indices.html[Reference]
|`v`
|`boolean` - Verbose mode. Display column headers
|`include_unloaded_segments` or `includeUnloadedSegments`
|`boolean` - If set to true segment stats will include stats for segments that are not currently loaded into memory
|===
=== cat.master
@ -817,6 +820,10 @@ link:{ref}/cluster-health.html[Reference]
|`index`
|`string, string[]` - Limit the information returned to a specific index
|`expand_wildcards` or `expandWildcards`
|`'open', 'closed', 'none', 'all'` - Whether to expand wildcard expression to concrete indices that are open, closed or both. +
_Default:_ `all`
|`level`
|`'cluster', 'indices', 'shards'` - Specify the level of detail for returned information +
_Default:_ `cluster`
@ -1070,9 +1077,6 @@ link:{ref}/docs-index_.html[Reference]
|`wait_for_active_shards` or `waitForActiveShards`
|`string` - Sets the number of shard copies that must be active before proceeding with the index operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)
|`parent`
|`string` - ID of the parent document
|`refresh`
|`'true', 'false', 'wait_for'` - If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes.
@ -1116,9 +1120,6 @@ link:{ref}/docs-delete.html[Reference]
|`wait_for_active_shards` or `waitForActiveShards`
|`string` - Sets the number of shard copies that must be active before proceeding with the delete operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)
|`parent`
|`string` - ID of parent document
|`refresh`
|`'true', 'false', 'wait_for'` - If `true` then refresh the effected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes.
@ -1318,9 +1319,6 @@ link:{ref}/docs-get.html[Reference]
|`stored_fields` or `storedFields`
|`string, string[]` - A comma-separated list of stored fields to return in the response
|`parent`
|`string` - The ID of the parent document
|`preference`
|`string` - Specify the node or shard the operation should be performed on (default: random)
@ -1367,9 +1365,6 @@ link:{ref}/docs-get.html[Reference]
|`type`
|`string` - The type of the document; deprecated and optional starting with 7.0
|`parent`
|`string` - The ID of the parent document
|`preference`
|`string` - Specify the node or shard the operation should be performed on (default: random)
@ -1435,9 +1430,6 @@ _Default:_ `OR`
|`lenient`
|`boolean` - Specify whether format-based query failures (such as providing text to a numeric field) should be ignored
|`parent`
|`string` - The ID of the parent document
|`preference`
|`string` - Specify the node or shard the operation should be performed on (default: random)
@ -1485,6 +1477,9 @@ link:{ref}/search-field-caps.html[Reference]
|`'open', 'closed', 'none', 'all'` - Whether to expand wildcard expression to concrete indices that are open, closed or both. +
_Default:_ `open`
|`include_unmapped` or `includeUnmapped`
|`boolean` - Indicates whether unmapped fields should be included in the response.
|===
=== get
@ -1507,9 +1502,6 @@ link:{ref}/docs-get.html[Reference]
|`stored_fields` or `storedFields`
|`string, string[]` - A comma-separated list of stored fields to return in the response
|`parent`
|`string` - The ID of the parent document
|`preference`
|`string` - Specify the node or shard the operation should be performed on (default: random)
@ -1531,12 +1523,6 @@ link:{ref}/docs-get.html[Reference]
|`_source_includes` or `_sourceIncludes`
|`string, string[]` - A list of fields to extract and return from the _source field
|`_source_exclude` or `_sourceExclude`
|`string, string[]` - A list of fields to exclude from the returned _source field
|`_source_include` or `_sourceInclude`
|`string, string[]` - A list of fields to extract and return from the _source field
|`version`
|`number` - Explicit version number for concurrency control
@ -1578,9 +1564,6 @@ link:{ref}/docs-get.html[Reference]
|`type`
|`string` - The type of the document; deprecated and optional starting with 7.0
|`parent`
|`string` - The ID of the parent document
|`preference`
|`string` - Specify the node or shard the operation should be performed on (default: random)
@ -1634,9 +1617,6 @@ link:{ref}/docs-index_.html[Reference]
|`'index', 'create'` - Explicit operation type +
_Default:_ `index`
|`parent`
|`string` - ID of the parent document
|`refresh`
|`'true', 'false', 'wait_for'` - If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes.
@ -1750,6 +1730,9 @@ link:{ref}/indices-open-close.html[Reference]
|`'open', 'closed', 'none', 'all'` - Whether to expand wildcard expression to concrete indices that are open, closed or both. +
_Default:_ `open`
|`wait_for_active_shards` or `waitForActiveShards`
|`string` - Sets the number of active shards to wait for before the operation returns.
|===
=== indices.create
@ -2665,6 +2648,17 @@ _Default:_ `indices`
|`include_segment_file_sizes` or `includeSegmentFileSizes`
|`boolean` - Whether to report the aggregated disk usage of each one of the Lucene index files (only applies if segment stats are requested)
|`include_unloaded_segments` or `includeUnloadedSegments`
|`boolean` - If set to true segment stats will include stats for segments that are not currently loaded into memory
|`expand_wildcards` or `expandWildcards`
|`'open', 'closed', 'none', 'all'` - Whether to expand wildcard expression to concrete indices that are open, closed or both. +
_Default:_ `open`
|`forbid_closed_indices` or `forbidClosedIndices`
|`boolean` - If set to false stats will also be collected from closed indices if explicitly specified or if expand_wildcards expands to closed indices +
_Default:_ `true`
|===
=== indices.updateAliases
@ -2935,8 +2929,8 @@ link:{ref}/search-multi-search.html[Reference]
_Default:_ `128`
|`max_concurrent_shard_requests` or `maxConcurrentShardRequests`
|`number` - The number of concurrent shard requests each sub search executes concurrently. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests +
_Default:_ `The default grows with the number of nodes in the cluster but is at most 256.`
|`number` - The number of concurrent shard requests each sub search executes concurrently per node. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests +
_Default:_ `5`
|`rest_total_hits_as_int` or `restTotalHitsAsInt`
|`boolean` - Indicates whether hits.total should be rendered as an integer or an object in the rest search response
@ -3030,9 +3024,6 @@ _Default:_ `true`
|`routing`
|`string` - Specific routing value. Applies to all returned documents unless otherwise specified in body "params" or "docs".
|`parent`
|`string` - Parent id of documents. Applies to all returned documents unless otherwise specified in body "params" or "docs".
|`realtime`
|`boolean` - Specifies if requests are real-time as opposed to near-real-time (default: true).
@ -3311,6 +3302,18 @@ link:{ref}/search-template.html[Reference]
|===
=== scriptsPainlessContext
[source,js]
----
client.scriptsPainlessContext([params] [, options] [, callback])
----
[cols=2*]
|===
|`context`
|`string` - Select a specific context to retrieve API information about
|===
=== scriptsPainlessExecute
[source,js]
----
@ -3488,7 +3491,7 @@ _Default:_ `512`
|`max_concurrent_shard_requests` or `maxConcurrentShardRequests`
|`number` - The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order to limit the number of concurrent shard requests +
_Default:_ `The default is 5.`
_Default:_ `5`
|`pre_filter_shard_size` or `preFilterShardSize`
|`number` - A threshold that enforces a pre-filter roundtrip to prefilter search shards based on query rewriting if the number of shards the search request expands to exceeds the threshold. This filter roundtrip can limit the number of shards significantly if for instance a shard can not match any documents based on its rewrite method ie. if date filters are mandatory to match but the shard bounds and the query are disjoint. +
@ -3910,9 +3913,6 @@ _Default:_ `true`
|`routing`
|`string` - Specific routing value.
|`parent`
|`string` - Parent id of documents.
|`realtime`
|`boolean` - Specifies if request is real-time as opposed to near-real-time (default: true).
@ -3959,9 +3959,6 @@ link:{ref}/docs-update.html[Reference]
|`lang`
|`string` - The script language (default: painless)
|`parent`
|`string` - ID of the parent document. It is only used for routing and for the upsert request
|`refresh`
|`'true', 'false', 'wait_for'` - If `true` then refresh the effected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then do nothing with refreshes.
@ -4282,6 +4279,121 @@ link:http://www.elastic.co/guide/en/elasticsearch/reference/current[Reference]
|===
=== dataFrame.deleteDataFrameTransform
[source,js]
----
client.dataFrame.deleteDataFrameTransform([params] [, options] [, callback])
----
link:{ref}/delete-data-frame-transform.html[Reference]
[cols=2*]
|===
|`transform_id` or `transformId`
|`string` - The id of the transform to delete
|===
=== dataFrame.getDataFrameTransform
[source,js]
----
client.dataFrame.getDataFrameTransform([params] [, options] [, callback])
----
link:{ref}/get-data-frame-transform.html[Reference]
[cols=2*]
|===
|`transform_id` or `transformId`
|`string` - The id or comma delimited list of id expressions of the transforms to get, '_all' or '*' implies get all transforms
|`from`
|`number` - skips a number of transform configs, defaults to 0
|`size`
|`number` - specifies a max number of transforms to get, defaults to 100
|===
=== dataFrame.getDataFrameTransformStats
[source,js]
----
client.dataFrame.getDataFrameTransformStats([params] [, options] [, callback])
----
link:{ref}/get-data-frame-transform-stats.html[Reference]
[cols=2*]
|===
|`transform_id` or `transformId`
|`string` - The id of the transform for which to get stats. '_all' or '*' implies all transforms
|`from`
|`number` - skips a number of transform stats, defaults to 0
|`size`
|`number` - specifies a max number of transform stats to get, defaults to 100
|===
=== dataFrame.previewDataFrameTransform
[source,js]
----
client.dataFrame.previewDataFrameTransform([params] [, options] [, callback])
----
link:{ref}/preview-data-frame-transform.html[Reference]
[cols=2*]
|===
|`body`
|`object` - The definition for the data_frame transform to preview
|===
=== dataFrame.putDataFrameTransform
[source,js]
----
client.dataFrame.putDataFrameTransform([params] [, options] [, callback])
----
link:{ref}/put-data-frame-transform.html[Reference]
[cols=2*]
|===
|`transform_id` or `transformId`
|`string` - The id of the new transform.
|`body`
|`object` - The data frame transform definition
|===
=== dataFrame.startDataFrameTransform
[source,js]
----
client.dataFrame.startDataFrameTransform([params] [, options] [, callback])
----
link:{ref}/start-data-frame-transform.html[Reference]
[cols=2*]
|===
|`transform_id` or `transformId`
|`string` - The id of the transform to start
|`timeout`
|`string` - Controls the time to wait for the transform to start
|===
=== dataFrame.stopDataFrameTransform
[source,js]
----
client.dataFrame.stopDataFrameTransform([params] [, options] [, callback])
----
link:{ref}/stop-data-frame-transform.html[Reference]
[cols=2*]
|===
|`transform_id` or `transformId`
|`string` - The id of the transform to stop
|`wait_for_completion` or `waitForCompletion`
|`boolean` - Whether to wait for the transform to fully stop before returning or not. Default to false
|`timeout`
|`string` - Controls the time to wait until the transform has stopped. Default to 30 seconds
|===
=== graph.explore
[source,js]
----
@ -6036,7 +6148,7 @@ link:{ref}/security-api-put-role-mapping.html[Reference]
|`'true', 'false', 'wait_for'` - If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes.
|`body`
|`object` - The role to add
|`object` - The role mapping to add
|===

34
index.d.ts vendored
View File

@ -192,6 +192,38 @@ declare class Client extends EventEmitter {
}
count: ApiMethod<RequestParams.Count>
create: ApiMethod<RequestParams.Create>
data_frame: {
delete_data_frame_transform: ApiMethod<RequestParams.DataFrameDeleteDataFrameTransform>
deleteDataFrameTransform: ApiMethod<RequestParams.DataFrameDeleteDataFrameTransform>
get_data_frame_transform: ApiMethod<RequestParams.DataFrameGetDataFrameTransform>
getDataFrameTransform: ApiMethod<RequestParams.DataFrameGetDataFrameTransform>
get_data_frame_transform_stats: ApiMethod<RequestParams.DataFrameGetDataFrameTransformStats>
getDataFrameTransformStats: ApiMethod<RequestParams.DataFrameGetDataFrameTransformStats>
preview_data_frame_transform: ApiMethod<RequestParams.DataFramePreviewDataFrameTransform>
previewDataFrameTransform: ApiMethod<RequestParams.DataFramePreviewDataFrameTransform>
put_data_frame_transform: ApiMethod<RequestParams.DataFramePutDataFrameTransform>
putDataFrameTransform: ApiMethod<RequestParams.DataFramePutDataFrameTransform>
start_data_frame_transform: ApiMethod<RequestParams.DataFrameStartDataFrameTransform>
startDataFrameTransform: ApiMethod<RequestParams.DataFrameStartDataFrameTransform>
stop_data_frame_transform: ApiMethod<RequestParams.DataFrameStopDataFrameTransform>
stopDataFrameTransform: ApiMethod<RequestParams.DataFrameStopDataFrameTransform>
}
dataFrame: {
delete_data_frame_transform: ApiMethod<RequestParams.DataFrameDeleteDataFrameTransform>
deleteDataFrameTransform: ApiMethod<RequestParams.DataFrameDeleteDataFrameTransform>
get_data_frame_transform: ApiMethod<RequestParams.DataFrameGetDataFrameTransform>
getDataFrameTransform: ApiMethod<RequestParams.DataFrameGetDataFrameTransform>
get_data_frame_transform_stats: ApiMethod<RequestParams.DataFrameGetDataFrameTransformStats>
getDataFrameTransformStats: ApiMethod<RequestParams.DataFrameGetDataFrameTransformStats>
preview_data_frame_transform: ApiMethod<RequestParams.DataFramePreviewDataFrameTransform>
previewDataFrameTransform: ApiMethod<RequestParams.DataFramePreviewDataFrameTransform>
put_data_frame_transform: ApiMethod<RequestParams.DataFramePutDataFrameTransform>
putDataFrameTransform: ApiMethod<RequestParams.DataFramePutDataFrameTransform>
start_data_frame_transform: ApiMethod<RequestParams.DataFrameStartDataFrameTransform>
startDataFrameTransform: ApiMethod<RequestParams.DataFrameStartDataFrameTransform>
stop_data_frame_transform: ApiMethod<RequestParams.DataFrameStopDataFrameTransform>
stopDataFrameTransform: ApiMethod<RequestParams.DataFrameStopDataFrameTransform>
}
delete: ApiMethod<RequestParams.Delete>
delete_by_query: ApiMethod<RequestParams.DeleteByQuery>
deleteByQuery: ApiMethod<RequestParams.DeleteByQuery>
@ -458,6 +490,8 @@ declare class Client extends EventEmitter {
stop_job: ApiMethod<RequestParams.RollupStopJob>
stopJob: ApiMethod<RequestParams.RollupStopJob>
}
scripts_painless_context: ApiMethod<RequestParams.ScriptsPainlessContext>
scriptsPainlessContext: ApiMethod<RequestParams.ScriptsPainlessContext>
scripts_painless_execute: ApiMethod<RequestParams.ScriptsPainlessExecute>
scriptsPainlessExecute: ApiMethod<RequestParams.ScriptsPainlessExecute>
scroll: ApiMethod<RequestParams.Scroll>

View File

@ -26,6 +26,8 @@ const esDefaultRoles = [
'beats_system',
'code_admin',
'code_user',
'data_frame_transforms_admin',
'data_frame_transforms_user',
'ingest_admin',
'kibana_dashboard_only_user',
'kibana_system',

View File

@ -34,6 +34,10 @@ const esFolder = join(__dirname, '..', '..', 'elasticsearch')
const yamlFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'test')
const xPackYamlFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'test')
const customSkips = [
// Test cat indices output for closed index (pre 7.2.0) is failing
'cat.indices/10_basic.yml',
// cluster health with closed index (pre 7.2.0) is failing
'cluster.health/10_basic.yml',
// TODO: remove this once 'arbitrary_key' is implemented
// https://github.com/elastic/elasticsearch/pull/41492
'indices.split/30_copy_settings.yml',