Compare commits
33 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b99654602a | |||
| 733070963b | |||
| 726d1824bd | |||
| e94eefe8a2 | |||
| cd61e30bb3 | |||
| 21683e6826 | |||
| 647546a4e5 | |||
| 1a6c36e291 | |||
| a34c6dd3a7 | |||
| 2a59c634f7 | |||
| 2d9bfd6730 | |||
| 68730dc0e6 | |||
| 0f60d78e5d | |||
| 0455b76fb8 | |||
| 63a68fb615 | |||
| d58365eb70 | |||
| 51568ed505 | |||
| be7c9f5e9d | |||
| 35b03aed17 | |||
| c51fbfaafd | |||
| 95847f030c | |||
| c0000aa207 | |||
| da7220ad8a | |||
| b52b96b95e | |||
| 79c9c8e03c | |||
| 0e1f526f55 | |||
| a4460b719d | |||
| 44698e5b44 | |||
| 077a13e39d | |||
| ed8caabf69 | |||
| c30e4cdb95 | |||
| 3eac66e47a | |||
| 2e9825808f |
224
.ci/Jenkinsfile
vendored
224
.ci/Jenkinsfile
vendored
@ -1,224 +0,0 @@
|
||||
#!/usr/bin/env groovy
|
||||
|
||||
@Library('apm@current') _
|
||||
|
||||
def NODE_JS_VERSIONS = [8,10,12]
|
||||
def nodeJsVersion = NODE_JS_VERSIONS[randomNumber(min: 0, max:2)]
|
||||
|
||||
pipeline {
|
||||
agent {
|
||||
label 'docker && immutable'
|
||||
}
|
||||
|
||||
environment {
|
||||
REPO = 'elasticsearch-js'
|
||||
BASE_DIR = "src/github.com/elastic/${env.REPO}"
|
||||
NODE_JS_DEFAULT_VERSION = "${nodeJsVersion}"
|
||||
NODE_JS_VERSIONS = "${NODE_JS_VERSIONS.join(',')}"
|
||||
HOME = "${env.WORKSPACE}"
|
||||
npm_config_cache = 'npm-cache'
|
||||
}
|
||||
|
||||
options {
|
||||
timeout(time: 1, unit: 'HOURS')
|
||||
buildDiscarder(logRotator(numToKeepStr: '20', artifactNumToKeepStr: '20', daysToKeepStr: '30'))
|
||||
timestamps()
|
||||
ansiColor('xterm')
|
||||
disableResume()
|
||||
durabilityHint('PERFORMANCE_OPTIMIZED')
|
||||
}
|
||||
|
||||
triggers {
|
||||
issueCommentTrigger('(?i).*(?:jenkins\\W+)?run\\W+(?:the\\W+)?tests(?:\\W+please)?.*')
|
||||
// env.CHANGE_ID as a value in case of a commit or a pr, which means
|
||||
// that we will have a daily cron job only for branches that don't have an active pr
|
||||
cron(env.CHANGE_ID ? '' : '@daily')
|
||||
}
|
||||
|
||||
stages {
|
||||
stage('Checkout') {
|
||||
options { skipDefaultCheckout() }
|
||||
steps {
|
||||
deleteDir()
|
||||
gitCheckout(basedir: "${BASE_DIR}", githubNotifyFirstTimeContributor: false)
|
||||
stash allowEmpty: true, name: 'source', useDefaultExcludes: false
|
||||
}
|
||||
}
|
||||
|
||||
stage('Install dependencies') {
|
||||
options { skipDefaultCheckout() }
|
||||
steps {
|
||||
deleteDir()
|
||||
unstash 'source'
|
||||
script {
|
||||
buildDockerImage(image: "node:${env.NODE_JS_DEFAULT_VERSION}-alpine").inside(){
|
||||
dir("${BASE_DIR}"){
|
||||
sh(label: 'System info', script: 'node --version; npm --version')
|
||||
sh(label: 'Install dependencies', script: 'npm install')
|
||||
}
|
||||
}
|
||||
}
|
||||
stash allowEmpty: true, name: 'source-dependencies', useDefaultExcludes: false
|
||||
}
|
||||
}
|
||||
|
||||
stage('License check') {
|
||||
options { skipDefaultCheckout() }
|
||||
steps {
|
||||
withGithubNotify(context: 'License check') {
|
||||
deleteDir()
|
||||
unstash 'source-dependencies'
|
||||
script {
|
||||
buildDockerImage(image: "node:${env.NODE_JS_DEFAULT_VERSION}-alpine").inside(){
|
||||
dir("${BASE_DIR}"){
|
||||
sh(label: 'Check production dependencies licenses', script: 'npm run license-checker')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage('Linter') {
|
||||
options { skipDefaultCheckout() }
|
||||
steps {
|
||||
withGithubNotify(context: 'Linter') {
|
||||
deleteDir()
|
||||
unstash 'source-dependencies'
|
||||
script {
|
||||
buildDockerImage(image: "node:${env.NODE_JS_DEFAULT_VERSION}-alpine").inside(){
|
||||
dir("${BASE_DIR}"){
|
||||
sh(label: 'Lint code with standardjs', script: 'npm run lint')
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage('Unit test') {
|
||||
failFast true
|
||||
options { skipDefaultCheckout() }
|
||||
steps {
|
||||
withGithubNotify(context: 'Unit test') {
|
||||
script {
|
||||
def versions = env.NODE_JS_VERSIONS.split(',')
|
||||
def parallelTasks = [:]
|
||||
versions.each{ version ->
|
||||
parallelTasks["Node.js v${version}"] = buildUnitTest(version: version)
|
||||
}
|
||||
parallel(parallelTasks)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage('Integration test') {
|
||||
failFast true
|
||||
options { skipDefaultCheckout() }
|
||||
parallel {
|
||||
stage('OSS') {
|
||||
agent { label 'docker && immutable' }
|
||||
options { skipDefaultCheckout() }
|
||||
environment {
|
||||
TEST_ES_SERVER = 'http://elasticsearch:9200'
|
||||
}
|
||||
steps {
|
||||
withGithubNotify(context: 'Integration test OSS') {
|
||||
deleteDir()
|
||||
unstash 'source-dependencies'
|
||||
dir("${BASE_DIR}"){
|
||||
// Sometimes the docker registry fails and has random timeouts
|
||||
// this block will retry a doker image 3 times before to fail.
|
||||
retry(3) {
|
||||
sleep randomNumber(min: 5, max: 10)
|
||||
sh(label: 'Start Elasticsearch', script: './scripts/es-docker.sh --detach')
|
||||
}
|
||||
}
|
||||
script {
|
||||
buildDockerImage(fromDockerfile: true).inside('--network=elastic'){
|
||||
dir("${BASE_DIR}"){
|
||||
sh(label: 'Integration test', script: 'npm run test:integration | tee test-integration.tap')
|
||||
sh(label: 'Generating test reporting', script: './node_modules/.bin/tap-mocha-reporter xunit < test-integration.tap > junit-integration.xml')
|
||||
}
|
||||
}
|
||||
}
|
||||
sh(label: 'Stop Elasticsearch', script: 'docker kill $(docker ps -q)')
|
||||
junit(allowEmptyResults: true, keepLongStdio: true, testResults: "${BASE_DIR}/**/junit-*.xml")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stage('xPack') {
|
||||
agent { label 'docker && immutable' }
|
||||
options { skipDefaultCheckout() }
|
||||
environment {
|
||||
TEST_ES_SERVER = 'https://elastic:changeme@elasticsearch:9200'
|
||||
}
|
||||
steps {
|
||||
withGithubNotify(context: 'Integration test xPack') {
|
||||
deleteDir()
|
||||
unstash 'source-dependencies'
|
||||
dir("${BASE_DIR}"){
|
||||
// Sometimes the docker registry fails and has random timeouts
|
||||
// this block will retry a doker image 3 times before to fail.
|
||||
retry(3) {
|
||||
sleep randomNumber(min: 5, max: 10)
|
||||
sh(label: 'Start Elasticsearch', script: './scripts/es-docker-platinum.sh --detach')
|
||||
}
|
||||
}
|
||||
script {
|
||||
buildDockerImage(fromDockerfile: true).inside('--network=elastic'){
|
||||
dir("${BASE_DIR}"){
|
||||
sh(label: 'Integration test', script: 'npm run test:integration | tee test-integration.tap')
|
||||
sh(label: 'Generating test reporting', script: './node_modules/.bin/tap-mocha-reporter xunit < test-integration.tap > junit-integration.xml')
|
||||
}
|
||||
}
|
||||
}
|
||||
sh(label: 'Stop Elasticsearch', script: 'docker kill $(docker ps -q)')
|
||||
junit(allowEmptyResults: true, keepLongStdio: true, testResults: "${BASE_DIR}/**/junit-*.xml")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sometimes the docker registry fails and has random timeouts
|
||||
// this function will retry a doker image 3 times before to fail.
|
||||
def buildDockerImage(args) {
|
||||
def image
|
||||
retry(3) {
|
||||
sleep randomNumber(min: 5, max: 10)
|
||||
if (args.fromDockerfile == true) {
|
||||
image = docker.build('nodejs-image', "--build-arg NODE_JS_VERSION=${env.NODE_JS_DEFAULT_VERSION} ${BASE_DIR}/.ci/docker")
|
||||
} else {
|
||||
image = docker.image(args.image)
|
||||
// make sure we have the latest available from Docker Hub
|
||||
image.pull()
|
||||
}
|
||||
}
|
||||
return image
|
||||
}
|
||||
|
||||
def buildUnitTest(args) {
|
||||
return {
|
||||
node('docker && immutable') {
|
||||
deleteDir()
|
||||
unstash 'source'
|
||||
script {
|
||||
buildDockerImage(image: "node:${args.version}-alpine").inside(){
|
||||
dir("${BASE_DIR}"){
|
||||
sh(label: 'Install dependencies', script: 'npm install')
|
||||
sh(label: 'Run unit test', script: 'npm run test:unit | tee test-unit.tap')
|
||||
sh(label: 'Run behavior test', script: 'npm run test:behavior | tee test-behavior.tap')
|
||||
sh(label: 'Run types test', script: 'npm run test:types')
|
||||
sh(label: 'Generating test reporting', script: './node_modules/.bin/tap-mocha-reporter xunit < test-unit.tap > junit-unit.xml; ./node_modules/.bin/tap-mocha-reporter xunit < test-behavior.tap > junit-behavior.xml')
|
||||
}
|
||||
}
|
||||
}
|
||||
junit(allowEmptyResults: true, keepLongStdio: true, testResults: "${BASE_DIR}/**/junit-*.xml")
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,94 +0,0 @@
|
||||
version: '3.2'
|
||||
services:
|
||||
client-oss:
|
||||
image: docker.elastic.co/clients/elasticsearch-js:${NODE_JS_VERSION:-10}
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: .ci/Dockerfile
|
||||
args:
|
||||
NODE_JS_VERSION: ${NODE_JS_VERSION:-10}
|
||||
CODECOV_TOKEN: ${CODECOV_TOKEN}
|
||||
environment:
|
||||
- "TEST_ES_SERVER=http://elasticsearch-oss:9200"
|
||||
volumes:
|
||||
- ..:/usr/src/app
|
||||
# This will mount the node_modules directory
|
||||
# to the host machine using the buildtime directory.
|
||||
- /usr/src/app/node_modules
|
||||
- esvol:/tmp
|
||||
networks:
|
||||
- esnet-oss
|
||||
depends_on:
|
||||
- elasticsearch-oss
|
||||
|
||||
elasticsearch-oss:
|
||||
image: docker.elastic.co/elasticsearch/elasticsearch:${ELASTICSEARCH_VERSION:-8.0.0-SNAPSHOT}
|
||||
volumes:
|
||||
- esvol:/tmp
|
||||
networks:
|
||||
- esnet-oss
|
||||
environment:
|
||||
- path.repo=/tmp
|
||||
- "repositories.url.allowed_urls=http://snapshot.*"
|
||||
- node.attr.testattr=test
|
||||
- bootstrap.memory_lock=false
|
||||
- "discovery.type=single-node"
|
||||
- "ES_JAVA_OPTS=-Xms512m -Xmx512m"
|
||||
|
||||
client-platinum:
|
||||
image: docker.elastic.co/clients/elasticsearch-js:${NODE_JS_VERSION:-10}
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: .ci/Dockerfile
|
||||
args:
|
||||
NODE_JS_VERSION: ${NODE_JS_VERSION:-10}
|
||||
environment:
|
||||
- "TEST_ES_SERVER=https://elastic:changeme@elasticsearch-platinum:9200"
|
||||
volumes:
|
||||
- ..:/usr/src/app
|
||||
# This will mount the node_modules directory
|
||||
# to the host machine using the buildtime directory.
|
||||
- /usr/src/app/node_modules
|
||||
- esvol:/tmp
|
||||
networks:
|
||||
- esnet-platinum
|
||||
depends_on:
|
||||
- elasticsearch-platinum
|
||||
# there is not need to run again also the unit test
|
||||
command: ["npm", "run", "test:integration"]
|
||||
|
||||
elasticsearch-platinum:
|
||||
image: docker.elastic.co/elasticsearch/elasticsearch:${ELASTICSEARCH_VERSION:-8.0.0-SNAPSHOT}
|
||||
ports:
|
||||
- "9200:9200"
|
||||
networks:
|
||||
- esnet-platinum
|
||||
environment:
|
||||
- "node.attr.testattr=test"
|
||||
- "path.repo=/tmp"
|
||||
- "repositories.url.allowed_urls=http://snapshot.*"
|
||||
- "discovery.type=single-node"
|
||||
- "ES_JAVA_OPTS=-Xms1g -Xmx1g"
|
||||
- "ELASTIC_PASSWORD=changeme"
|
||||
- "xpack.security.enabled=true"
|
||||
- "xpack.license.self_generated.type=trial"
|
||||
- "xpack.security.http.ssl.enabled=true"
|
||||
- "xpack.security.http.ssl.verification_mode=certificate"
|
||||
- "xpack.security.http.ssl.key=certs/testnode.key"
|
||||
- "xpack.security.http.ssl.certificate=certs/testnode.crt"
|
||||
- "xpack.security.http.ssl.certificate_authorities=certs/ca.crt"
|
||||
- "xpack.security.transport.ssl.enabled=true"
|
||||
- "xpack.security.transport.ssl.key=certs/testnode.key"
|
||||
- "xpack.security.transport.ssl.certificate=certs/testnode.crt"
|
||||
- "xpack.security.transport.ssl.certificate_authorities=certs/ca.crt"
|
||||
volumes:
|
||||
- "./certs/testnode.crt:/usr/share/elasticsearch/config/certs/testnode.crt"
|
||||
- "./certs/testnode.key:/usr/share/elasticsearch/config/certs/testnode.key"
|
||||
- "./certs/ca.crt:/usr/share/elasticsearch/config/certs/ca.crt"
|
||||
|
||||
networks:
|
||||
# we need two networks otherwise the two ES instances will join each other
|
||||
esnet-oss:
|
||||
esnet-platinum:
|
||||
volumes:
|
||||
esvol:
|
||||
74
.ci/jobs/defaults.yml
Normal file
74
.ci/jobs/defaults.yml
Normal file
@ -0,0 +1,74 @@
|
||||
---
|
||||
|
||||
##### GLOBAL METADATA
|
||||
|
||||
- meta:
|
||||
cluster: clients-ci
|
||||
|
||||
##### JOB DEFAULTS
|
||||
|
||||
- job:
|
||||
project-type: matrix
|
||||
logrotate:
|
||||
daysToKeep: 30
|
||||
numToKeep: 100
|
||||
properties:
|
||||
- github:
|
||||
url: https://github.com/elastic/elasticsearch-js/
|
||||
- inject:
|
||||
properties-content: HOME=$JENKINS_HOME
|
||||
concurrent: true
|
||||
node: flyweight
|
||||
scm:
|
||||
- git:
|
||||
name: origin
|
||||
credentials-id: f6c7695a-671e-4f4f-a331-acdce44ff9ba
|
||||
reference-repo: /var/lib/jenkins/.git-references/elasticsearch-js.git
|
||||
branches:
|
||||
- ${branch_specifier}
|
||||
url: https://github.com/elastic/elasticsearch-js.git
|
||||
wipe-workspace: 'True'
|
||||
triggers:
|
||||
- github
|
||||
vault:
|
||||
# vault read auth/approle/role/clients-ci/role-id
|
||||
role_id: ddbd0d44-0e51-105b-177a-c8fdfd445126
|
||||
axes:
|
||||
- axis:
|
||||
type: slave
|
||||
name: label
|
||||
values:
|
||||
- linux
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: ELASTICSEARCH_VERSION
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: NODE_JS_VERSION
|
||||
- axis:
|
||||
type: yaml
|
||||
filename: .ci/test-matrix.yml
|
||||
name: TEST_SUITE
|
||||
yaml-strategy:
|
||||
exclude-key: exclude
|
||||
filename: .ci/test-matrix.yml
|
||||
wrappers:
|
||||
- ansicolor
|
||||
- timeout:
|
||||
type: absolute
|
||||
timeout: 120
|
||||
fail: true
|
||||
- timestamps
|
||||
- workspace-cleanup
|
||||
builders:
|
||||
- shell: |-
|
||||
#!/usr/local/bin/runbld
|
||||
.ci/run-tests
|
||||
publishers:
|
||||
- email:
|
||||
recipients: infra-root+build@elastic.co
|
||||
# - junit:
|
||||
# results: "*-junit.xml"
|
||||
# allow-empty-results: true
|
||||
14
.ci/jobs/elastic+elasticsearch-js+5.x.yml
Normal file
14
.ci/jobs/elastic+elasticsearch-js+5.x.yml
Normal file
@ -0,0 +1,14 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+5.x
|
||||
display-name: 'elastic / elasticsearch-js # 5.x'
|
||||
description: Testing the elasticsearch-js 5.x branch.
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/5.x
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: '@weekly'
|
||||
14
.ci/jobs/elastic+elasticsearch-js+6.x.yml
Normal file
14
.ci/jobs/elastic+elasticsearch-js+6.x.yml
Normal file
@ -0,0 +1,14 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+6.x
|
||||
display-name: 'elastic / elasticsearch-js # 6.x'
|
||||
description: Testing the elasticsearch-js 6.x branch.
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/6.x
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: '@weekly'
|
||||
14
.ci/jobs/elastic+elasticsearch-js+7.x.yml
Normal file
14
.ci/jobs/elastic+elasticsearch-js+7.x.yml
Normal file
@ -0,0 +1,14 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+7.x
|
||||
display-name: 'elastic / elasticsearch-js # 7.x'
|
||||
description: Testing the elasticsearch-js 7.x branch.
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/7.x
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: '@weekly'
|
||||
14
.ci/jobs/elastic+elasticsearch-js+master.yml
Normal file
14
.ci/jobs/elastic+elasticsearch-js+master.yml
Normal file
@ -0,0 +1,14 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+master
|
||||
display-name: 'elastic / elasticsearch-js # master'
|
||||
description: Testing the elasticsearch-js master branch.
|
||||
parameters:
|
||||
- string:
|
||||
name: branch_specifier
|
||||
default: refs/heads/master
|
||||
description: the Git branch specifier to build (<branchName>, <tagName>,
|
||||
<commitId>, etc.)
|
||||
triggers:
|
||||
- github
|
||||
- timed: '@daily'
|
||||
@ -1,54 +0,0 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+jenkins-pipelines
|
||||
display-name: 'elastic / elasticsearch-js # jenkins-pipelines'
|
||||
description: Testing the elasticsearch-js jenkins-pipelines branch.
|
||||
project-type: multibranch
|
||||
logrotate:
|
||||
daysToKeep: 30
|
||||
numToKeep: 100
|
||||
number-to-keep: '5'
|
||||
days-to-keep: '1'
|
||||
concurrent: true
|
||||
node: linux
|
||||
script-path: Jenkinsfile
|
||||
scm:
|
||||
- github:
|
||||
branch-discovery: all
|
||||
discover-pr-forks-strategy: merge-current
|
||||
discover-pr-forks-trust: permission
|
||||
discover-pr-origin: merge-current
|
||||
discover-tags: true
|
||||
repo: elasticsearch-js
|
||||
repo-owner: elastic
|
||||
credentials-id: 2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken
|
||||
ssh-checkout:
|
||||
credentials: f6c7695a-671e-4f4f-a331-acdce44ff9ba
|
||||
build-strategies:
|
||||
- tags:
|
||||
ignore-tags-older-than: -1
|
||||
ignore-tags-newer-than: -1
|
||||
- regular-branches: true
|
||||
- change-request:
|
||||
ignore-target-only-changes: false
|
||||
clean:
|
||||
after: true
|
||||
before: true
|
||||
prune: true
|
||||
shallow-clone: true
|
||||
depth: 3
|
||||
do-not-fetch-tags: true
|
||||
submodule:
|
||||
disable: false
|
||||
recursive: true
|
||||
parent-credentials: true
|
||||
timeout: 100
|
||||
timeout: '15'
|
||||
use-author: true
|
||||
wipe-workspace: 'True'
|
||||
periodic-folder-trigger: 1d
|
||||
prune-dead-branches: true
|
||||
publishers:
|
||||
- email:
|
||||
recipients: infra-root+build@elastic.co
|
||||
|
||||
19
.ci/jobs/elastic+elasticsearch-js+pull-request.yml
Normal file
19
.ci/jobs/elastic+elasticsearch-js+pull-request.yml
Normal file
@ -0,0 +1,19 @@
|
||||
---
|
||||
- job:
|
||||
name: elastic+elasticsearch-js+pull-request
|
||||
display-name: 'elastic / elasticsearch-js # pull-request'
|
||||
description: Testing of elasticsearch-js pull requests.
|
||||
scm:
|
||||
- git:
|
||||
branches:
|
||||
- ${ghprbActualCommit}
|
||||
refspec: +refs/pull/*:refs/remotes/origin/pr/*
|
||||
triggers:
|
||||
- github-pull-request:
|
||||
org-list:
|
||||
- elastic
|
||||
allow-whitelist-orgs-as-admins: true
|
||||
github-hooks: true
|
||||
status-context: clients-ci
|
||||
cancel-builds-on-update: true
|
||||
publishers: []
|
||||
201
.ci/run-elasticsearch.sh
Normal file
201
.ci/run-elasticsearch.sh
Normal file
@ -0,0 +1,201 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Launch one or more Elasticsearch nodes via the Docker image,
|
||||
# to form a cluster suitable for running the REST API tests.
|
||||
#
|
||||
# Export the ELASTICSEARCH_VERSION variable, eg. 'elasticsearch:8.0.0-SNAPSHOT'.
|
||||
|
||||
# Version 1.0
|
||||
# - Initial version of the run-elasticsearch.sh script
|
||||
|
||||
|
||||
if [[ -z "$ELASTICSEARCH_VERSION" ]]; then
|
||||
echo -e "\033[31;1mERROR:\033[0m Required environment variable [ELASTICSEARCH_VERSION] not set\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
SCRIPT_PATH=$(dirname $(realpath -s $0))
|
||||
|
||||
moniker=$(echo "$ELASTICSEARCH_VERSION" | tr -C "[:alnum:]" '-')
|
||||
suffix=rest-test
|
||||
|
||||
NODE_NAME=${NODE_NAME-${moniker}node1}
|
||||
MASTER_NODE_NAME=${MASTER_NODE_NAME-${NODE_NAME}}
|
||||
CLUSTER_NAME=${CLUSTER_NAME-${moniker}${suffix}}
|
||||
HTTP_PORT=${HTTP_PORT-9200}
|
||||
|
||||
ELASTIC_PASSWORD=${ELASTIC_PASSWORD-changeme}
|
||||
SSL_CERT=${SSL_CERT-"${SCRIPT_PATH}/certs/testnode.crt"}
|
||||
SSL_KEY=${SSL_KEY-"${SCRIPT_PATH}/certs/testnode.key"}
|
||||
SSL_CA=${SSL_CA-"${SCRIPT_PATH}/certs/ca.crt"}
|
||||
SSL_CA_PEM=${SSL_CA-"${SCRIPT_PATH}/certs/ca.pem"}
|
||||
|
||||
DETACH=${DETACH-false}
|
||||
CLEANUP=${CLEANUP-false}
|
||||
|
||||
volume_name=${NODE_NAME}-${suffix}-data
|
||||
network_default=${moniker}${suffix}
|
||||
NETWORK_NAME=${NETWORK_NAME-"$network_default"}
|
||||
|
||||
set +x
|
||||
|
||||
function cleanup_volume {
|
||||
if [[ "$(docker volume ls -q -f name=$1)" ]]; then
|
||||
echo -e "\033[34;1mINFO:\033[0m Removing volume $1\033[0m"
|
||||
(docker volume rm "$1") || true
|
||||
fi
|
||||
}
|
||||
function container_running {
|
||||
if [[ "$(docker ps -q -f name=$1)" ]]; then
|
||||
return 0;
|
||||
else return 1;
|
||||
fi
|
||||
}
|
||||
function cleanup_node {
|
||||
if container_running "$1"; then
|
||||
echo -e "\033[34;1mINFO:\033[0m Removing container $1\033[0m"
|
||||
(docker container rm --force --volumes "$1") || true
|
||||
cleanup_volume "$1-${suffix}-data"
|
||||
fi
|
||||
}
|
||||
function cleanup_network {
|
||||
if [[ "$(docker network ls -q -f name=$1)" ]]; then
|
||||
echo -e "\033[34;1mINFO:\033[0m Removing network $1\033[0m"
|
||||
(docker network rm "$1") || true
|
||||
fi
|
||||
}
|
||||
|
||||
function cleanup {
|
||||
if [[ "$DETACH" != "true" ]] || [[ "$1" == "1" ]]; then
|
||||
echo -e "\033[34;1mINFO:\033[0m clean the node and volume on startup (1) OR on exit if not detached\033[0m"
|
||||
cleanup_node "$NODE_NAME"
|
||||
fi
|
||||
if [[ "$DETACH" != "true" ]]; then
|
||||
echo -e "\033[34;1mINFO:\033[0m clean the network if not detached (start and exit)\033[0m"
|
||||
cleanup_network "$NETWORK_NAME"
|
||||
fi
|
||||
};
|
||||
trap "cleanup 0" EXIT
|
||||
|
||||
if [[ "$CLEANUP" == "true" ]]; then
|
||||
trap - EXIT
|
||||
if [[ -z "$(docker network ls -q -f name=${NETWORK_NAME})" ]]; then
|
||||
echo -e "\033[34;1mINFO:\033[0m $NETWORK_NAME is already deleted\033[0m"
|
||||
exit 0
|
||||
fi
|
||||
containers=$(docker network inspect -f '{{ range $key, $value := .Containers }}{{ printf "%s\n" .Name}}{{ end }}' ${NETWORK_NAME})
|
||||
while read -r container; do
|
||||
cleanup_node "$container"
|
||||
done <<< "$containers"
|
||||
cleanup_network "$NETWORK_NAME"
|
||||
echo -e "\033[32;1mSUCCESS:\033[0m Cleaned up and exiting\033[0m"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m Making sure previous run leftover infrastructure is removed \033[0m"
|
||||
cleanup 1
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m Creating network $NETWORK_NAME if it does not exist already \033[0m"
|
||||
docker network inspect "$NETWORK_NAME" > /dev/null 2>&1 || docker network create "$NETWORK_NAME"
|
||||
|
||||
environment=($(cat <<-END
|
||||
--env node.name=$NODE_NAME
|
||||
--env cluster.name=$CLUSTER_NAME
|
||||
--env cluster.initial_master_nodes=$MASTER_NODE_NAME
|
||||
--env discovery.seed_hosts=$MASTER_NODE_NAME
|
||||
--env cluster.routing.allocation.disk.threshold_enabled=false
|
||||
--env bootstrap.memory_lock=true
|
||||
--env node.attr.testattr=test
|
||||
--env path.repo=/tmp
|
||||
--env repositories.url.allowed_urls=http://snapshot.test*
|
||||
END
|
||||
))
|
||||
|
||||
volumes=($(cat <<-END
|
||||
--volume $volume_name:/usr/share/elasticsearch/data
|
||||
END
|
||||
))
|
||||
|
||||
if [[ "$ELASTICSEARCH_VERSION" != *oss* ]]; then
|
||||
environment+=($(cat <<-END
|
||||
--env ELASTIC_PASSWORD=$ELASTIC_PASSWORD
|
||||
--env xpack.license.self_generated.type=trial
|
||||
--env xpack.security.enabled=true
|
||||
--env xpack.security.http.ssl.enabled=true
|
||||
--env xpack.security.http.ssl.verification_mode=certificate
|
||||
--env xpack.security.http.ssl.key=certs/testnode.key
|
||||
--env xpack.security.http.ssl.certificate=certs/testnode.crt
|
||||
--env xpack.security.http.ssl.certificate_authorities=certs/ca.crt
|
||||
--env xpack.security.transport.ssl.enabled=true
|
||||
--env xpack.security.transport.ssl.key=certs/testnode.key
|
||||
--env xpack.security.transport.ssl.certificate=certs/testnode.crt
|
||||
--env xpack.security.transport.ssl.certificate_authorities=certs/ca.crt
|
||||
END
|
||||
))
|
||||
volumes+=($(cat <<-END
|
||||
--volume $SSL_CERT:/usr/share/elasticsearch/config/certs/testnode.crt
|
||||
--volume $SSL_KEY:/usr/share/elasticsearch/config/certs/testnode.key
|
||||
--volume $SSL_CA:/usr/share/elasticsearch/config/certs/ca.crt
|
||||
--volume $SSL_CA_PEM:/usr/share/elasticsearch/config/certs/ca.pem
|
||||
END
|
||||
))
|
||||
fi
|
||||
|
||||
url="http://$NODE_NAME"
|
||||
if [[ "$ELASTICSEARCH_VERSION" != *oss* ]]; then
|
||||
url="https://elastic:$ELASTIC_PASSWORD@$NODE_NAME"
|
||||
fi
|
||||
|
||||
cert_validation_flags="--insecure"
|
||||
if [[ "$NODE_NAME" == "instance" ]]; then
|
||||
cert_validation_flags="--cacert /usr/share/elasticsearch/config/certs/ca.pem --resolve ${NODE_NAME}:443:127.0.0.1"
|
||||
fi
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m Starting container $NODE_NAME \033[0m"
|
||||
set -x
|
||||
docker run \
|
||||
--name "$NODE_NAME" \
|
||||
--network "$NETWORK_NAME" \
|
||||
--env ES_JAVA_OPTS=-"Xms1g -Xmx1g" \
|
||||
"${environment[@]}" \
|
||||
"${volumes[@]}" \
|
||||
--publish "$HTTP_PORT":9200 \
|
||||
--ulimit nofile=65536:65536 \
|
||||
--ulimit memlock=-1:-1 \
|
||||
--detach="$DETACH" \
|
||||
--health-cmd="curl $cert_validation_flags --fail $url:9200/_cluster/health || exit 1" \
|
||||
--health-interval=2s \
|
||||
--health-retries=20 \
|
||||
--health-timeout=2s \
|
||||
--rm \
|
||||
docker.elastic.co/elasticsearch/"$ELASTICSEARCH_VERSION";
|
||||
set +x
|
||||
|
||||
if [[ "$DETACH" == "true" ]]; then
|
||||
until ! container_running "$NODE_NAME" || (container_running "$NODE_NAME" && [[ "$(docker inspect -f "{{.State.Health.Status}}" ${NODE_NAME})" != "starting" ]]); do
|
||||
echo ""
|
||||
docker inspect -f "{{range .State.Health.Log}}{{.Output}}{{end}}" ${NODE_NAME}
|
||||
echo -e "\033[34;1mINFO:\033[0m waiting for node $NODE_NAME to be up\033[0m"
|
||||
sleep 2;
|
||||
done;
|
||||
|
||||
# Always show logs if the container is running, this is very useful both on CI as well as while developing
|
||||
if container_running $NODE_NAME; then
|
||||
docker logs $NODE_NAME
|
||||
fi
|
||||
|
||||
if ! container_running $NODE_NAME || [[ "$(docker inspect -f "{{.State.Health.Status}}" ${NODE_NAME})" != "healthy" ]]; then
|
||||
cleanup 1
|
||||
echo
|
||||
echo -e "\033[31;1mERROR:\033[0m Failed to start ${ELASTICSEARCH_VERSION} in detached mode beyond health checks\033[0m"
|
||||
echo -e "\033[31;1mERROR:\033[0m dumped the docker log before shutting the node down\033[0m"
|
||||
exit 1
|
||||
else
|
||||
echo
|
||||
echo -e "\033[32;1mSUCCESS:\033[0m Detached and healthy: ${NODE_NAME} on docker network: ${NETWORK_NAME}\033[0m"
|
||||
echo -e "\033[32;1mSUCCESS:\033[0m Running on: ${url/$NODE_NAME/localhost}:${HTTP_PORT}\033[0m"
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
61
.ci/run-repository.sh
Executable file
61
.ci/run-repository.sh
Executable file
@ -0,0 +1,61 @@
|
||||
#!/usr/bin/env bash
|
||||
# parameters are available to this script
|
||||
|
||||
# ELASTICSEARCH_VERSION -- version e.g Major.Minor.Patch(-Prelease)
|
||||
# ELASTICSEARCH_CONTAINER -- the docker moniker as a reference to know which docker image distribution is used
|
||||
# ELASTICSEARCH_URL -- The url at which elasticsearch is reachable
|
||||
# NETWORK_NAME -- The docker network name
|
||||
# NODE_NAME -- The docker container name also used as Elasticsearch node name
|
||||
# NODE_JS_VERSION -- node js version (defined in test-matrix.yml, a default is hardcoded here)
|
||||
|
||||
NODE_JS_VERSION=${NODE_JS_VERSION-12}
|
||||
|
||||
echo -e "\033[34;1mINFO:\033[0m URL ${ELASTICSEARCH_URL}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m VERSION ${ELASTICSEARCH_VERSION}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m CONTAINER ${ELASTICSEARCH_CONTAINER}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m TEST_SUITE ${TEST_SUITE}\033[0m"
|
||||
echo -e "\033[34;1mINFO:\033[0m NODE_JS_VERSION ${NODE_JS_VERSION}\033[0m"
|
||||
|
||||
echo -e "\033[1m>>>>> Build docker container >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
set +x
|
||||
export VAULT_TOKEN=$(vault write -field=token auth/approle/login role_id="$VAULT_ROLE_ID" secret_id="$VAULT_SECRET_ID")
|
||||
export CODECOV_TOKEN=$(vault read -field=token secret/clients-ci/elasticsearch-js/codecov)
|
||||
unset VAULT_ROLE_ID VAULT_SECRET_ID VAULT_TOKEN
|
||||
set -x
|
||||
|
||||
docker build \
|
||||
--file .ci/Dockerfile \
|
||||
--tag elastic/elasticsearch-js \
|
||||
--build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
|
||||
.
|
||||
|
||||
echo -e "\033[1m>>>>> NPM run ci >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
|
||||
repo=$(realpath $(dirname $(realpath -s $0))/../)
|
||||
|
||||
if [[ $TEST_SUITE != "xpack" ]]; then
|
||||
docker run \
|
||||
--network=${NETWORK_NAME} \
|
||||
--env "TEST_ES_SERVER=${ELASTICSEARCH_URL}" \
|
||||
--env "CODECOV_TOKEN" \
|
||||
--volume $repo:/usr/src/app \
|
||||
--volume /usr/src/app/node_modules \
|
||||
--name elasticsearch-js \
|
||||
--rm \
|
||||
elastic/elasticsearch-js \
|
||||
npm run ci
|
||||
else
|
||||
docker run \
|
||||
--network=${NETWORK_NAME} \
|
||||
--env "TEST_ES_SERVER=${ELASTICSEARCH_URL}" \
|
||||
--env "CODECOV_TOKEN" \
|
||||
--volume $repo:/usr/src/app \
|
||||
--volume /usr/src/app/node_modules \
|
||||
--name elasticsearch-js \
|
||||
--rm \
|
||||
elastic/elasticsearch-js \
|
||||
npm run test:integration
|
||||
fi
|
||||
143
.ci/run-tests
143
.ci/run-tests
@ -1,110 +1,57 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Version 1.0
|
||||
# - Moved to .ci folder and seperated out `run-repository.sh`
|
||||
|
||||
#
|
||||
# Runs the client tests via Docker with the expectation that the required
|
||||
# environment variables have already been exported before running this script.
|
||||
#
|
||||
# The required environment variables include:
|
||||
#
|
||||
# - $ELASTICSEARCH_VERSION
|
||||
# - $NODE_JS_VERSION
|
||||
#
|
||||
if [[ -z $ELASTICSEARCH_VERSION ]]; then
|
||||
echo -e "\033[31;1mERROR:\033[0m Required environment variable [ELASTICSEARCH_VERSION] not set\033[0m"
|
||||
exit 1
|
||||
fi
|
||||
set -euxo pipefail
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
set +x
|
||||
export VAULT_TOKEN=$(vault write -field=token auth/approle/login role_id="$VAULT_ROLE_ID" secret_id="$VAULT_SECRET_ID")
|
||||
export CODECOV_TOKEN=$(vault read -field=token secret/clients-ci/elasticsearch-js/codecov)
|
||||
unset VAULT_ROLE_ID VAULT_SECRET_ID VAULT_TOKEN
|
||||
set -x
|
||||
TEST_SUITE=${TEST_SUITE-oss}
|
||||
NODE_NAME=instance
|
||||
|
||||
|
||||
elasticsearch_image=elasticsearch
|
||||
elasticsearch_url=https://elastic:changeme@${NODE_NAME}:9200
|
||||
if [[ $TEST_SUITE != "xpack" ]]; then
|
||||
elasticsearch_image=elasticsearch-${TEST_SUITE}
|
||||
elasticsearch_url=http://${NODE_NAME}:9200
|
||||
fi
|
||||
|
||||
function cleanup {
|
||||
docker container rm --force --volumes elasticsearch-oss > /dev/null 2>&1 || true
|
||||
docker container rm --force --volumes elasticsearch-platinum > /dev/null 2>&1 || true
|
||||
docker container rm --force --volumes elasticsearch-js-oss > /dev/null 2>&1 || true
|
||||
docker container rm --force --volumes elasticsearch-js-platinum > /dev/null 2>&1 || true
|
||||
docker network rm esnet-oss > /dev/null
|
||||
docker network rm esnet-platinum > /dev/null
|
||||
status=$?
|
||||
set +x
|
||||
ELASTICSEARCH_VERSION=${elasticsearch_image}:${ELASTICSEARCH_VERSION} \
|
||||
NODE_NAME=${NODE_NAME} \
|
||||
NETWORK_NAME=elasticsearch \
|
||||
CLEANUP=true \
|
||||
bash ./.ci/run-elasticsearch.sh
|
||||
# Report status and exit
|
||||
if [[ "$status" == "0" ]]; then
|
||||
echo -e "\n\033[32;1mSUCCESS run-tests\033[0m"
|
||||
exit 0
|
||||
else
|
||||
echo -e "\n\033[31;1mFAILURE during run-tests\033[0m"
|
||||
exit ${status}
|
||||
fi
|
||||
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
# create network and volume
|
||||
docker network create esnet-oss
|
||||
docker network create esnet-platinum
|
||||
echo -e "\033[1m>>>>> Start [$ELASTICSEARCH_VERSION container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
|
||||
# create client image
|
||||
docker build \
|
||||
--file .ci/Dockerfile \
|
||||
--tag elastic/elasticsearch-js \
|
||||
--build-arg NODE_JS_VERSION=${NODE_JS_VERSION} \
|
||||
.
|
||||
ELASTICSEARCH_VERSION=${elasticsearch_image}:${ELASTICSEARCH_VERSION} \
|
||||
NODE_NAME=${NODE_NAME} \
|
||||
NETWORK_NAME=elasticsearch \
|
||||
DETACH=true \
|
||||
bash .ci/run-elasticsearch.sh
|
||||
|
||||
# run elasticsearch oss
|
||||
docker run \
|
||||
--rm \
|
||||
--env "node.attr.testattr=test" \
|
||||
--env "path.repo=/tmp" \
|
||||
--env "repositories.url.allowed_urls=http://snapshot.*" \
|
||||
--env "discovery.type=single-node" \
|
||||
--network=esnet-oss \
|
||||
--name=elasticsearch-oss \
|
||||
--detach \
|
||||
docker.elastic.co/elasticsearch/elasticsearch-oss:${ELASTICSEARCH_VERSION}
|
||||
|
||||
# run elasticsearch platinum
|
||||
repo=$(pwd)
|
||||
testnodecrt="/.ci/certs/testnode.crt"
|
||||
testnodekey="/.ci/certs/testnode.key"
|
||||
cacrt="/.ci/certs/ca.crt"
|
||||
|
||||
docker run \
|
||||
--rm \
|
||||
--env "node.attr.testattr=test" \
|
||||
--env "path.repo=/tmp" \
|
||||
--env "repositories.url.allowed_urls=http://snapshot.*" \
|
||||
--env "discovery.type=single-node" \
|
||||
--env "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
|
||||
--env "ELASTIC_PASSWORD=changeme" \
|
||||
--env "xpack.security.enabled=true" \
|
||||
--env "xpack.license.self_generated.type=trial" \
|
||||
--env "xpack.security.http.ssl.enabled=true" \
|
||||
--env "xpack.security.http.ssl.verification_mode=certificate" \
|
||||
--env "xpack.security.http.ssl.key=certs/testnode.key" \
|
||||
--env "xpack.security.http.ssl.certificate=certs/testnode.crt" \
|
||||
--env "xpack.security.http.ssl.certificate_authorities=certs/ca.crt" \
|
||||
--env "xpack.security.transport.ssl.enabled=true" \
|
||||
--env "xpack.security.transport.ssl.key=certs/testnode.key" \
|
||||
--env "xpack.security.transport.ssl.certificate=certs/testnode.crt" \
|
||||
--env "xpack.security.transport.ssl.certificate_authorities=certs/ca.crt" \
|
||||
--volume "$repo$testnodecrt:/usr/share/elasticsearch/config/certs/testnode.crt" \
|
||||
--volume "$repo$testnodekey:/usr/share/elasticsearch/config/certs/testnode.key" \
|
||||
--volume "$repo$cacrt:/usr/share/elasticsearch/config/certs/ca.crt" \
|
||||
--network=esnet-platinum \
|
||||
--name=elasticsearch-platinum \
|
||||
--detach \
|
||||
docker.elastic.co/elasticsearch/elasticsearch:${ELASTICSEARCH_VERSION}
|
||||
|
||||
# run the client unit and oss integration test
|
||||
docker run \
|
||||
--network=esnet-oss \
|
||||
--env "TEST_ES_SERVER=http://elasticsearch-oss:9200" \
|
||||
--env "CODECOV_TOKEN" \
|
||||
--volume $repo:/usr/src/app \
|
||||
--volume /usr/src/app/node_modules \
|
||||
--name elasticsearch-js-oss \
|
||||
--rm \
|
||||
elastic/elasticsearch-js \
|
||||
npm run ci
|
||||
|
||||
# run the client platinium integration test
|
||||
docker run \
|
||||
--network=esnet-platinum \
|
||||
--env "TEST_ES_SERVER=https://elastic:changeme@elasticsearch-platinum:9200" \
|
||||
--volume $repo:/usr/src/app \
|
||||
--volume /usr/src/app/node_modules \
|
||||
--name elasticsearch-js-platinum \
|
||||
--rm \
|
||||
elastic/elasticsearch-js \
|
||||
npm run test:integration
|
||||
echo -e "\033[1m>>>>> Repository specific tests >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"
|
||||
|
||||
ELASTICSEARCH_CONTAINER=${elasticsearch_image}:${ELASTICSEARCH_VERSION} \
|
||||
NETWORK_NAME=elasticsearch \
|
||||
NODE_NAME=${NODE_NAME} \
|
||||
ELASTICSEARCH_URL=${elasticsearch_url} \
|
||||
bash .ci/run-repository.sh
|
||||
|
||||
@ -1,10 +1,14 @@
|
||||
---
|
||||
ELASTICSEARCH_VERSION:
|
||||
- 7.3.0
|
||||
- 7.5.0
|
||||
|
||||
NODE_JS_VERSION:
|
||||
- 12
|
||||
- 10
|
||||
- 8
|
||||
|
||||
TEST_SUITE:
|
||||
- oss
|
||||
- xpack
|
||||
|
||||
exclude: ~
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@ -45,6 +45,9 @@ jspm_packages
|
||||
# vim swap files
|
||||
*.swp
|
||||
|
||||
#Jetbrains editor folder
|
||||
.idea
|
||||
|
||||
package-lock.json
|
||||
|
||||
# elasticsearch repo or binary files
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
|
||||
# Elasticsearch Node.js client
|
||||
|
||||
[](http://standardjs.com/) [](https://clients-ci.elastic.co/job/elastic+elasticsearch-js+master/) [](https://codecov.io/gh/elastic/elasticsearch-js) [](https://www.npmjs.com/package/@elastic/elasticsearch)
|
||||
[](http://standardjs.com/) [](https://clients-ci.elastic.co/view/Javascript/job/elastic+elasticsearch-js+master/) [](https://codecov.io/gh/elastic/elasticsearch-js) [](https://www.npmjs.com/package/@elastic/elasticsearch)
|
||||
|
||||
The official Node.js client for Elasticsearch.
|
||||
|
||||
|
||||
@ -14,7 +14,6 @@ function buildCatAliases (opts) {
|
||||
const acceptedQuerystring = [
|
||||
'format',
|
||||
'local',
|
||||
'master_timeout',
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
@ -27,7 +26,6 @@ function buildCatAliases (opts) {
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
@ -13,8 +13,6 @@ function buildCatCount (opts) {
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'format',
|
||||
'local',
|
||||
'master_timeout',
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
@ -27,7 +25,6 @@ function buildCatCount (opts) {
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
@ -14,8 +14,6 @@ function buildCatFielddata (opts) {
|
||||
const acceptedQuerystring = [
|
||||
'format',
|
||||
'bytes',
|
||||
'local',
|
||||
'master_timeout',
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
@ -29,7 +27,6 @@ function buildCatFielddata (opts) {
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
@ -13,11 +13,10 @@ function buildCatHealth (opts) {
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'format',
|
||||
'local',
|
||||
'master_timeout',
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
'time',
|
||||
'ts',
|
||||
'v',
|
||||
'pretty',
|
||||
@ -28,7 +27,6 @@ function buildCatHealth (opts) {
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
@ -21,6 +21,7 @@ function buildCatIndices (opts) {
|
||||
'help',
|
||||
'pri',
|
||||
's',
|
||||
'time',
|
||||
'v',
|
||||
'include_unloaded_segments',
|
||||
'pretty',
|
||||
|
||||
@ -12,6 +12,7 @@ function buildCatNodes (opts) {
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'bytes',
|
||||
'format',
|
||||
'full_id',
|
||||
'local',
|
||||
@ -19,6 +20,7 @@ function buildCatNodes (opts) {
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
'time',
|
||||
'v',
|
||||
'pretty',
|
||||
'human',
|
||||
|
||||
@ -18,6 +18,7 @@ function buildCatPendingTasks (opts) {
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
'time',
|
||||
'v',
|
||||
'pretty',
|
||||
'human',
|
||||
|
||||
@ -13,11 +13,14 @@ function buildCatRecovery (opts) {
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'format',
|
||||
'active_only',
|
||||
'bytes',
|
||||
'master_timeout',
|
||||
'detailed',
|
||||
'h',
|
||||
'help',
|
||||
'index',
|
||||
's',
|
||||
'time',
|
||||
'v',
|
||||
'pretty',
|
||||
'human',
|
||||
@ -27,7 +30,7 @@ function buildCatRecovery (opts) {
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
masterTimeout: 'master_timeout',
|
||||
activeOnly: 'active_only',
|
||||
errorTrace: 'error_trace',
|
||||
filterPath: 'filter_path'
|
||||
}
|
||||
|
||||
@ -19,6 +19,7 @@ function buildCatShards (opts) {
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
'time',
|
||||
'v',
|
||||
'pretty',
|
||||
'human',
|
||||
|
||||
@ -18,6 +18,7 @@ function buildCatSnapshots (opts) {
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
'time',
|
||||
'v',
|
||||
'pretty',
|
||||
'human',
|
||||
|
||||
@ -20,6 +20,7 @@ function buildCatTasks (opts) {
|
||||
'h',
|
||||
'help',
|
||||
's',
|
||||
'time',
|
||||
'v',
|
||||
'pretty',
|
||||
'human',
|
||||
|
||||
77
api/api/ccr.pause_auto_follow_pattern.js
Normal file
77
api/api/ccr.pause_auto_follow_pattern.js
Normal file
@ -0,0 +1,77 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildCcrPauseAutoFollowPattern (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a ccr.pause_auto_follow_pattern request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-pause-auto-follow-pattern.html
|
||||
*/
|
||||
return function ccrPauseAutoFollowPattern (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_ccr' + '/' + 'auto_follow' + '/' + encodeURIComponent(name) + '/' + 'pause'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildCcrPauseAutoFollowPattern
|
||||
77
api/api/ccr.resume_auto_follow_pattern.js
Normal file
77
api/api/ccr.resume_auto_follow_pattern.js
Normal file
@ -0,0 +1,77 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildCcrResumeAutoFollowPattern (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a ccr.resume_auto_follow_pattern request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/ccr-resume-auto-follow-pattern.html
|
||||
*/
|
||||
return function ccrResumeAutoFollowPattern (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_ccr' + '/' + 'auto_follow' + '/' + encodeURIComponent(name) + '/' + 'resume'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildCcrResumeAutoFollowPattern
|
||||
76
api/api/enrich.delete_policy.js
Normal file
76
api/api/enrich.delete_policy.js
Normal file
@ -0,0 +1,76 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildEnrichDeletePolicy (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a enrich.delete_policy request
|
||||
*/
|
||||
return function enrichDeletePolicy (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + '_enrich' + '/' + 'policy' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildEnrichDeletePolicy
|
||||
76
api/api/enrich.execute_policy.js
Normal file
76
api/api/enrich.execute_policy.js
Normal file
@ -0,0 +1,76 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildEnrichExecutePolicy (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
'wait_for_completion'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
waitForCompletion: 'wait_for_completion'
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a enrich.execute_policy request
|
||||
*/
|
||||
return function enrichExecutePolicy (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + '_enrich' + '/' + 'policy' + '/' + encodeURIComponent(name) + '/' + '_execute'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildEnrichExecutePolicy
|
||||
75
api/api/enrich.get_policy.js
Normal file
75
api/api/enrich.get_policy.js
Normal file
@ -0,0 +1,75 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildEnrichGetPolicy (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a enrich.get_policy request
|
||||
*/
|
||||
return function enrichGetPolicy (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if ((name) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_enrich' + '/' + 'policy' + '/' + encodeURIComponent(name)
|
||||
} else {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_enrich' + '/' + 'policy'
|
||||
}
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildEnrichGetPolicy
|
||||
80
api/api/enrich.put_policy.js
Normal file
80
api/api/enrich.put_policy.js
Normal file
@ -0,0 +1,80 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildEnrichPutPolicy (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a enrich.put_policy request
|
||||
*/
|
||||
return function enrichPutPolicy (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// check required parameters
|
||||
if (params['name'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: name')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
if (params['body'] == null) {
|
||||
const err = new ConfigurationError('Missing required parameter: body')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, name, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + '_enrich' + '/' + 'policy' + '/' + encodeURIComponent(name)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildEnrichPutPolicy
|
||||
70
api/api/enrich.stats.js
Normal file
70
api/api/enrich.stats.js
Normal file
@ -0,0 +1,70 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildEnrichStats (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a enrich.stats request
|
||||
*/
|
||||
return function enrichStats (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_enrich' + '/' + '_stats'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildEnrichStats
|
||||
@ -84,10 +84,10 @@ function buildIndex (opts) {
|
||||
var path = ''
|
||||
|
||||
if ((index) != null && (type) != null && (id) != null) {
|
||||
if (method == null) method = 'POST'
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + encodeURIComponent(index) + '/' + encodeURIComponent(type) + '/' + encodeURIComponent(id)
|
||||
} else if ((index) != null && (id) != null) {
|
||||
if (method == null) method = 'POST'
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_doc' + '/' + encodeURIComponent(id)
|
||||
} else if ((index) != null && (type) != null) {
|
||||
if (method == null) method = 'POST'
|
||||
|
||||
@ -79,10 +79,10 @@ function buildIndicesDeleteAlias (opts) {
|
||||
|
||||
if ((index) != null && (name) != null) {
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_aliases' + '/' + encodeURIComponent(name)
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_alias' + '/' + encodeURIComponent(name)
|
||||
} else {
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_alias' + '/' + encodeURIComponent(name)
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_aliases' + '/' + encodeURIComponent(name)
|
||||
}
|
||||
|
||||
// build request object
|
||||
|
||||
@ -79,10 +79,10 @@ function buildIndicesPutAlias (opts) {
|
||||
|
||||
if ((index) != null && (name) != null) {
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_aliases' + '/' + encodeURIComponent(name)
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_alias' + '/' + encodeURIComponent(name)
|
||||
} else {
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_alias' + '/' + encodeURIComponent(name)
|
||||
path = '/' + encodeURIComponent(index) + '/' + '_aliases' + '/' + encodeURIComponent(name)
|
||||
}
|
||||
|
||||
// build request object
|
||||
|
||||
@ -67,16 +67,16 @@ function buildNodesHotThreads (opts) {
|
||||
|
||||
if ((node_id || nodeId) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_cluster' + '/' + 'nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hot_threads'
|
||||
} else if ((node_id || nodeId) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hotthreads'
|
||||
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hot_threads'
|
||||
} else if ((node_id || nodeId) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_cluster' + '/' + 'nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hotthreads'
|
||||
} else if ((node_id || nodeId) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hot_threads'
|
||||
path = '/' + '_nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hotthreads'
|
||||
} else if ((node_id || nodeId) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_cluster' + '/' + 'nodes' + '/' + encodeURIComponent(node_id || nodeId) + '/' + 'hot_threads'
|
||||
} else {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_nodes' + '/' + 'hot_threads'
|
||||
|
||||
@ -15,11 +15,13 @@ function buildSecurityGetApiKey (opts) {
|
||||
'id',
|
||||
'name',
|
||||
'username',
|
||||
'realm_name'
|
||||
'realm_name',
|
||||
'owner'
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
realmName: 'realm_name'
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
71
api/api/slm.execute_retention.js
Normal file
71
api/api/slm.execute_retention.js
Normal file
@ -0,0 +1,71 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildSlmExecuteRetention (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a slm.execute_retention request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-execute-retention.html
|
||||
*/
|
||||
return function slmExecuteRetention (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_slm' + '/' + '_execute_retention'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: body || '',
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildSlmExecuteRetention
|
||||
71
api/api/slm.get_stats.js
Normal file
71
api/api/slm.get_stats.js
Normal file
@ -0,0 +1,71 @@
|
||||
// Licensed to Elasticsearch B.V under one or more agreements.
|
||||
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
|
||||
// See the LICENSE file in the project root for more information
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildSlmGetStats (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
const acceptedQuerystring = [
|
||||
|
||||
]
|
||||
|
||||
const snakeCase = {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a slm.get_stats request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/master/slm-get-stats.html
|
||||
*/
|
||||
return function slmGetStats (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
options = {}
|
||||
}
|
||||
if (typeof params === 'function' || params == null) {
|
||||
callback = params
|
||||
params = {}
|
||||
options = {}
|
||||
}
|
||||
|
||||
// validate headers object
|
||||
if (options.headers != null && typeof options.headers !== 'object') {
|
||||
const err = new ConfigurationError(`Headers should be an object, instead got: ${typeof options.headers}`)
|
||||
return handleError(err, callback)
|
||||
}
|
||||
|
||||
var warnings = []
|
||||
var { method, body, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring, warnings)
|
||||
|
||||
var ignore = options.ignore
|
||||
if (typeof ignore === 'number') {
|
||||
options.ignore = [ignore]
|
||||
}
|
||||
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_slm' + '/' + 'stats'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
body: null,
|
||||
querystring
|
||||
}
|
||||
|
||||
options.warnings = warnings.length === 0 ? null : warnings
|
||||
return makeRequest(request, options, callback)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildSlmGetStats
|
||||
@ -7,7 +7,7 @@
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDataFrameDeleteDataFrameTransform (opts) {
|
||||
function buildTransformDeleteTransform (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
@ -20,10 +20,10 @@ function buildDataFrameDeleteDataFrameTransform (opts) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a data_frame.delete_data_frame_transform request
|
||||
* Perform a transform.delete_transform request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html
|
||||
*/
|
||||
return function dataFrameDeleteDataFrameTransform (params, options, callback) {
|
||||
return function transformDeleteTransform (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
@ -59,7 +59,7 @@ function buildDataFrameDeleteDataFrameTransform (opts) {
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'DELETE'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms' + '/' + encodeURIComponent(transform_id || transformId)
|
||||
path = '/' + '_transform' + '/' + encodeURIComponent(transform_id || transformId)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
@ -74,4 +74,4 @@ function buildDataFrameDeleteDataFrameTransform (opts) {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDataFrameDeleteDataFrameTransform
|
||||
module.exports = buildTransformDeleteTransform
|
||||
@ -7,7 +7,7 @@
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDataFrameGetDataFrameTransform (opts) {
|
||||
function buildTransformGetTransform (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
@ -22,10 +22,10 @@ function buildDataFrameGetDataFrameTransform (opts) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a data_frame.get_data_frame_transform request
|
||||
* Perform a transform.get_transform request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform.html
|
||||
*/
|
||||
return function dataFrameGetDataFrameTransform (params, options, callback) {
|
||||
return function transformGetTransform (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
@ -56,10 +56,10 @@ function buildDataFrameGetDataFrameTransform (opts) {
|
||||
|
||||
if ((transform_id || transformId) != null) {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms' + '/' + encodeURIComponent(transform_id || transformId)
|
||||
path = '/' + '_transform' + '/' + encodeURIComponent(transform_id || transformId)
|
||||
} else {
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms'
|
||||
path = '/' + '_transform'
|
||||
}
|
||||
|
||||
// build request object
|
||||
@ -75,4 +75,4 @@ function buildDataFrameGetDataFrameTransform (opts) {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDataFrameGetDataFrameTransform
|
||||
module.exports = buildTransformGetTransform
|
||||
@ -7,7 +7,7 @@
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDataFrameGetDataFrameTransformStats (opts) {
|
||||
function buildTransformGetTransformStats (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
@ -22,10 +22,10 @@ function buildDataFrameGetDataFrameTransformStats (opts) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a data_frame.get_data_frame_transform_stats request
|
||||
* Perform a transform.get_transform_stats request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html
|
||||
*/
|
||||
return function dataFrameGetDataFrameTransformStats (params, options, callback) {
|
||||
return function transformGetTransformStats (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
@ -61,7 +61,7 @@ function buildDataFrameGetDataFrameTransformStats (opts) {
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'GET'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_stats'
|
||||
path = '/' + '_transform' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_stats'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
@ -76,4 +76,4 @@ function buildDataFrameGetDataFrameTransformStats (opts) {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDataFrameGetDataFrameTransformStats
|
||||
module.exports = buildTransformGetTransformStats
|
||||
@ -7,7 +7,7 @@
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDataFramePreviewDataFrameTransform (opts) {
|
||||
function buildTransformPreviewTransform (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
@ -20,10 +20,10 @@ function buildDataFramePreviewDataFrameTransform (opts) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a data_frame.preview_data_frame_transform request
|
||||
* Perform a transform.preview_transform request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-transform.html
|
||||
*/
|
||||
return function dataFramePreviewDataFrameTransform (params, options, callback) {
|
||||
return function transformPreviewTransform (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
@ -59,7 +59,7 @@ function buildDataFramePreviewDataFrameTransform (opts) {
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms' + '/' + '_preview'
|
||||
path = '/' + '_transform' + '/' + '_preview'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
@ -74,4 +74,4 @@ function buildDataFramePreviewDataFrameTransform (opts) {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDataFramePreviewDataFrameTransform
|
||||
module.exports = buildTransformPreviewTransform
|
||||
@ -7,7 +7,7 @@
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDataFramePutDataFrameTransform (opts) {
|
||||
function buildTransformPutTransform (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
@ -20,10 +20,10 @@ function buildDataFramePutDataFrameTransform (opts) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a data_frame.put_data_frame_transform request
|
||||
* Perform a transform.put_transform request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html
|
||||
*/
|
||||
return function dataFramePutDataFrameTransform (params, options, callback) {
|
||||
return function transformPutTransform (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
@ -63,7 +63,7 @@ function buildDataFramePutDataFrameTransform (opts) {
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'PUT'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms' + '/' + encodeURIComponent(transform_id || transformId)
|
||||
path = '/' + '_transform' + '/' + encodeURIComponent(transform_id || transformId)
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
@ -78,4 +78,4 @@ function buildDataFramePutDataFrameTransform (opts) {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDataFramePutDataFrameTransform
|
||||
module.exports = buildTransformPutTransform
|
||||
@ -7,7 +7,7 @@
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDataFrameStartDataFrameTransform (opts) {
|
||||
function buildTransformStartTransform (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
@ -20,10 +20,10 @@ function buildDataFrameStartDataFrameTransform (opts) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a data_frame.start_data_frame_transform request
|
||||
* Perform a transform.start_transform request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html
|
||||
*/
|
||||
return function dataFrameStartDataFrameTransform (params, options, callback) {
|
||||
return function transformStartTransform (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
@ -59,7 +59,7 @@ function buildDataFrameStartDataFrameTransform (opts) {
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_start'
|
||||
path = '/' + '_transform' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_start'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
@ -74,4 +74,4 @@ function buildDataFrameStartDataFrameTransform (opts) {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDataFrameStartDataFrameTransform
|
||||
module.exports = buildTransformStartTransform
|
||||
@ -7,7 +7,7 @@
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDataFrameStopDataFrameTransform (opts) {
|
||||
function buildTransformStopTransform (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
@ -23,10 +23,10 @@ function buildDataFrameStopDataFrameTransform (opts) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a data_frame.stop_data_frame_transform request
|
||||
* Perform a transform.stop_transform request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-transform.html
|
||||
*/
|
||||
return function dataFrameStopDataFrameTransform (params, options, callback) {
|
||||
return function transformStopTransform (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
@ -62,7 +62,7 @@ function buildDataFrameStopDataFrameTransform (opts) {
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_stop'
|
||||
path = '/' + '_transform' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_stop'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
@ -77,4 +77,4 @@ function buildDataFrameStopDataFrameTransform (opts) {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDataFrameStopDataFrameTransform
|
||||
module.exports = buildTransformStopTransform
|
||||
@ -7,7 +7,7 @@
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
function buildDataFrameUpdateDataFrameTransform (opts) {
|
||||
function buildTransformUpdateTransform (opts) {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const { makeRequest, ConfigurationError, handleError, snakeCaseKeys } = opts
|
||||
|
||||
@ -20,10 +20,10 @@ function buildDataFrameUpdateDataFrameTransform (opts) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform a data_frame.update_data_frame_transform request
|
||||
* Perform a transform.update_transform request
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html
|
||||
*/
|
||||
return function dataFrameUpdateDataFrameTransform (params, options, callback) {
|
||||
return function transformUpdateTransform (params, options, callback) {
|
||||
options = options || {}
|
||||
if (typeof options === 'function') {
|
||||
callback = options
|
||||
@ -63,7 +63,7 @@ function buildDataFrameUpdateDataFrameTransform (opts) {
|
||||
var path = ''
|
||||
|
||||
if (method == null) method = 'POST'
|
||||
path = '/' + '_data_frame' + '/' + 'transforms' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_update'
|
||||
path = '/' + '_transform' + '/' + encodeURIComponent(transform_id || transformId) + '/' + '_update'
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
@ -78,4 +78,4 @@ function buildDataFrameUpdateDataFrameTransform (opts) {
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = buildDataFrameUpdateDataFrameTransform
|
||||
module.exports = buildTransformUpdateTransform
|
||||
73
api/index.js
73
api/index.js
@ -53,10 +53,14 @@ function ESAPI (opts) {
|
||||
forgetFollower: lazyLoad('ccr.forget_follower', opts),
|
||||
get_auto_follow_pattern: lazyLoad('ccr.get_auto_follow_pattern', opts),
|
||||
getAutoFollowPattern: lazyLoad('ccr.get_auto_follow_pattern', opts),
|
||||
pause_auto_follow_pattern: lazyLoad('ccr.pause_auto_follow_pattern', opts),
|
||||
pauseAutoFollowPattern: lazyLoad('ccr.pause_auto_follow_pattern', opts),
|
||||
pause_follow: lazyLoad('ccr.pause_follow', opts),
|
||||
pauseFollow: lazyLoad('ccr.pause_follow', opts),
|
||||
put_auto_follow_pattern: lazyLoad('ccr.put_auto_follow_pattern', opts),
|
||||
putAutoFollowPattern: lazyLoad('ccr.put_auto_follow_pattern', opts),
|
||||
resume_auto_follow_pattern: lazyLoad('ccr.resume_auto_follow_pattern', opts),
|
||||
resumeAutoFollowPattern: lazyLoad('ccr.resume_auto_follow_pattern', opts),
|
||||
resume_follow: lazyLoad('ccr.resume_follow', opts),
|
||||
resumeFollow: lazyLoad('ccr.resume_follow', opts),
|
||||
stats: lazyLoad('ccr.stats', opts),
|
||||
@ -82,42 +86,6 @@ function ESAPI (opts) {
|
||||
},
|
||||
count: lazyLoad('count', opts),
|
||||
create: lazyLoad('create', opts),
|
||||
data_frame: {
|
||||
delete_data_frame_transform: lazyLoad('data_frame.delete_data_frame_transform', opts),
|
||||
deleteDataFrameTransform: lazyLoad('data_frame.delete_data_frame_transform', opts),
|
||||
get_data_frame_transform: lazyLoad('data_frame.get_data_frame_transform', opts),
|
||||
getDataFrameTransform: lazyLoad('data_frame.get_data_frame_transform', opts),
|
||||
get_data_frame_transform_stats: lazyLoad('data_frame.get_data_frame_transform_stats', opts),
|
||||
getDataFrameTransformStats: lazyLoad('data_frame.get_data_frame_transform_stats', opts),
|
||||
preview_data_frame_transform: lazyLoad('data_frame.preview_data_frame_transform', opts),
|
||||
previewDataFrameTransform: lazyLoad('data_frame.preview_data_frame_transform', opts),
|
||||
put_data_frame_transform: lazyLoad('data_frame.put_data_frame_transform', opts),
|
||||
putDataFrameTransform: lazyLoad('data_frame.put_data_frame_transform', opts),
|
||||
start_data_frame_transform: lazyLoad('data_frame.start_data_frame_transform', opts),
|
||||
startDataFrameTransform: lazyLoad('data_frame.start_data_frame_transform', opts),
|
||||
stop_data_frame_transform: lazyLoad('data_frame.stop_data_frame_transform', opts),
|
||||
stopDataFrameTransform: lazyLoad('data_frame.stop_data_frame_transform', opts),
|
||||
update_data_frame_transform: lazyLoad('data_frame.update_data_frame_transform', opts),
|
||||
updateDataFrameTransform: lazyLoad('data_frame.update_data_frame_transform', opts)
|
||||
},
|
||||
dataFrame: {
|
||||
delete_data_frame_transform: lazyLoad('data_frame.delete_data_frame_transform', opts),
|
||||
deleteDataFrameTransform: lazyLoad('data_frame.delete_data_frame_transform', opts),
|
||||
get_data_frame_transform: lazyLoad('data_frame.get_data_frame_transform', opts),
|
||||
getDataFrameTransform: lazyLoad('data_frame.get_data_frame_transform', opts),
|
||||
get_data_frame_transform_stats: lazyLoad('data_frame.get_data_frame_transform_stats', opts),
|
||||
getDataFrameTransformStats: lazyLoad('data_frame.get_data_frame_transform_stats', opts),
|
||||
preview_data_frame_transform: lazyLoad('data_frame.preview_data_frame_transform', opts),
|
||||
previewDataFrameTransform: lazyLoad('data_frame.preview_data_frame_transform', opts),
|
||||
put_data_frame_transform: lazyLoad('data_frame.put_data_frame_transform', opts),
|
||||
putDataFrameTransform: lazyLoad('data_frame.put_data_frame_transform', opts),
|
||||
start_data_frame_transform: lazyLoad('data_frame.start_data_frame_transform', opts),
|
||||
startDataFrameTransform: lazyLoad('data_frame.start_data_frame_transform', opts),
|
||||
stop_data_frame_transform: lazyLoad('data_frame.stop_data_frame_transform', opts),
|
||||
stopDataFrameTransform: lazyLoad('data_frame.stop_data_frame_transform', opts),
|
||||
update_data_frame_transform: lazyLoad('data_frame.update_data_frame_transform', opts),
|
||||
updateDataFrameTransform: lazyLoad('data_frame.update_data_frame_transform', opts)
|
||||
},
|
||||
delete: lazyLoad('delete', opts),
|
||||
delete_by_query: lazyLoad('delete_by_query', opts),
|
||||
deleteByQuery: lazyLoad('delete_by_query', opts),
|
||||
@ -125,6 +93,17 @@ function ESAPI (opts) {
|
||||
deleteByQueryRethrottle: lazyLoad('delete_by_query_rethrottle', opts),
|
||||
delete_script: lazyLoad('delete_script', opts),
|
||||
deleteScript: lazyLoad('delete_script', opts),
|
||||
enrich: {
|
||||
delete_policy: lazyLoad('enrich.delete_policy', opts),
|
||||
deletePolicy: lazyLoad('enrich.delete_policy', opts),
|
||||
execute_policy: lazyLoad('enrich.execute_policy', opts),
|
||||
executePolicy: lazyLoad('enrich.execute_policy', opts),
|
||||
get_policy: lazyLoad('enrich.get_policy', opts),
|
||||
getPolicy: lazyLoad('enrich.get_policy', opts),
|
||||
put_policy: lazyLoad('enrich.put_policy', opts),
|
||||
putPolicy: lazyLoad('enrich.put_policy', opts),
|
||||
stats: lazyLoad('enrich.stats', opts)
|
||||
},
|
||||
exists: lazyLoad('exists', opts),
|
||||
exists_source: lazyLoad('exists_source', opts),
|
||||
existsSource: lazyLoad('exists_source', opts),
|
||||
@ -469,8 +448,12 @@ function ESAPI (opts) {
|
||||
deleteLifecycle: lazyLoad('slm.delete_lifecycle', opts),
|
||||
execute_lifecycle: lazyLoad('slm.execute_lifecycle', opts),
|
||||
executeLifecycle: lazyLoad('slm.execute_lifecycle', opts),
|
||||
execute_retention: lazyLoad('slm.execute_retention', opts),
|
||||
executeRetention: lazyLoad('slm.execute_retention', opts),
|
||||
get_lifecycle: lazyLoad('slm.get_lifecycle', opts),
|
||||
getLifecycle: lazyLoad('slm.get_lifecycle', opts),
|
||||
get_stats: lazyLoad('slm.get_stats', opts),
|
||||
getStats: lazyLoad('slm.get_stats', opts),
|
||||
put_lifecycle: lazyLoad('slm.put_lifecycle', opts),
|
||||
putLifecycle: lazyLoad('slm.put_lifecycle', opts)
|
||||
},
|
||||
@ -506,6 +489,24 @@ function ESAPI (opts) {
|
||||
list: lazyLoad('tasks.list', opts)
|
||||
},
|
||||
termvectors: lazyLoad('termvectors', opts),
|
||||
transform: {
|
||||
delete_transform: lazyLoad('transform.delete_transform', opts),
|
||||
deleteTransform: lazyLoad('transform.delete_transform', opts),
|
||||
get_transform: lazyLoad('transform.get_transform', opts),
|
||||
getTransform: lazyLoad('transform.get_transform', opts),
|
||||
get_transform_stats: lazyLoad('transform.get_transform_stats', opts),
|
||||
getTransformStats: lazyLoad('transform.get_transform_stats', opts),
|
||||
preview_transform: lazyLoad('transform.preview_transform', opts),
|
||||
previewTransform: lazyLoad('transform.preview_transform', opts),
|
||||
put_transform: lazyLoad('transform.put_transform', opts),
|
||||
putTransform: lazyLoad('transform.put_transform', opts),
|
||||
start_transform: lazyLoad('transform.start_transform', opts),
|
||||
startTransform: lazyLoad('transform.start_transform', opts),
|
||||
stop_transform: lazyLoad('transform.stop_transform', opts),
|
||||
stopTransform: lazyLoad('transform.stop_transform', opts),
|
||||
update_transform: lazyLoad('transform.update_transform', opts),
|
||||
updateTransform: lazyLoad('transform.update_transform', opts)
|
||||
},
|
||||
update: lazyLoad('update', opts),
|
||||
update_by_query: lazyLoad('update_by_query', opts),
|
||||
updateByQuery: lazyLoad('update_by_query', opts),
|
||||
|
||||
139
api/requestParams.d.ts
vendored
139
api/requestParams.d.ts
vendored
@ -32,7 +32,6 @@ export interface CatAliases extends Generic {
|
||||
name?: string | string[];
|
||||
format?: string;
|
||||
local?: boolean;
|
||||
master_timeout?: string;
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
@ -54,8 +53,6 @@ export interface CatAllocation extends Generic {
|
||||
export interface CatCount extends Generic {
|
||||
index?: string | string[];
|
||||
format?: string;
|
||||
local?: boolean;
|
||||
master_timeout?: string;
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
@ -66,8 +63,6 @@ export interface CatFielddata extends Generic {
|
||||
fields?: string | string[];
|
||||
format?: string;
|
||||
bytes?: 'b' | 'k' | 'kb' | 'm' | 'mb' | 'g' | 'gb' | 't' | 'tb' | 'p' | 'pb';
|
||||
local?: boolean;
|
||||
master_timeout?: string;
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
@ -76,11 +71,10 @@ export interface CatFielddata extends Generic {
|
||||
|
||||
export interface CatHealth extends Generic {
|
||||
format?: string;
|
||||
local?: boolean;
|
||||
master_timeout?: string;
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
time?: 'd (Days)' | 'h (Hours)' | 'm (Minutes)' | 's (Seconds)' | 'ms (Milliseconds)' | 'micros (Microseconds)' | 'nanos (Nanoseconds)';
|
||||
ts?: boolean;
|
||||
v?: boolean;
|
||||
}
|
||||
@ -101,6 +95,7 @@ export interface CatIndices extends Generic {
|
||||
help?: boolean;
|
||||
pri?: boolean;
|
||||
s?: string | string[];
|
||||
time?: 'd (Days)' | 'h (Hours)' | 'm (Minutes)' | 's (Seconds)' | 'ms (Milliseconds)' | 'micros (Microseconds)' | 'nanos (Nanoseconds)';
|
||||
v?: boolean;
|
||||
include_unloaded_segments?: boolean;
|
||||
}
|
||||
@ -126,6 +121,7 @@ export interface CatNodeattrs extends Generic {
|
||||
}
|
||||
|
||||
export interface CatNodes extends Generic {
|
||||
bytes?: 'b' | 'k' | 'kb' | 'm' | 'mb' | 'g' | 'gb' | 't' | 'tb' | 'p' | 'pb';
|
||||
format?: string;
|
||||
full_id?: boolean;
|
||||
local?: boolean;
|
||||
@ -133,6 +129,7 @@ export interface CatNodes extends Generic {
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
time?: 'd (Days)' | 'h (Hours)' | 'm (Minutes)' | 's (Seconds)' | 'ms (Milliseconds)' | 'micros (Microseconds)' | 'nanos (Nanoseconds)';
|
||||
v?: boolean;
|
||||
}
|
||||
|
||||
@ -143,6 +140,7 @@ export interface CatPendingTasks extends Generic {
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
time?: 'd (Days)' | 'h (Hours)' | 'm (Minutes)' | 's (Seconds)' | 'ms (Milliseconds)' | 'micros (Microseconds)' | 'nanos (Nanoseconds)';
|
||||
v?: boolean;
|
||||
}
|
||||
|
||||
@ -159,11 +157,13 @@ export interface CatPlugins extends Generic {
|
||||
export interface CatRecovery extends Generic {
|
||||
index?: string | string[];
|
||||
format?: string;
|
||||
active_only?: boolean;
|
||||
bytes?: 'b' | 'k' | 'kb' | 'm' | 'mb' | 'g' | 'gb' | 't' | 'tb' | 'p' | 'pb';
|
||||
master_timeout?: string;
|
||||
detailed?: boolean;
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
time?: 'd (Days)' | 'h (Hours)' | 'm (Minutes)' | 's (Seconds)' | 'ms (Milliseconds)' | 'micros (Microseconds)' | 'nanos (Nanoseconds)';
|
||||
v?: boolean;
|
||||
}
|
||||
|
||||
@ -196,6 +196,7 @@ export interface CatShards extends Generic {
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
time?: 'd (Days)' | 'h (Hours)' | 'm (Minutes)' | 's (Seconds)' | 'ms (Milliseconds)' | 'micros (Microseconds)' | 'nanos (Nanoseconds)';
|
||||
v?: boolean;
|
||||
}
|
||||
|
||||
@ -207,6 +208,7 @@ export interface CatSnapshots extends Generic {
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
time?: 'd (Days)' | 'h (Hours)' | 'm (Minutes)' | 's (Seconds)' | 'ms (Milliseconds)' | 'micros (Microseconds)' | 'nanos (Nanoseconds)';
|
||||
v?: boolean;
|
||||
}
|
||||
|
||||
@ -219,6 +221,7 @@ export interface CatTasks extends Generic {
|
||||
h?: string | string[];
|
||||
help?: boolean;
|
||||
s?: string | string[];
|
||||
time?: 'd (Days)' | 'h (Hours)' | 'm (Minutes)' | 's (Seconds)' | 'ms (Milliseconds)' | 'micros (Microseconds)' | 'nanos (Nanoseconds)';
|
||||
v?: boolean;
|
||||
}
|
||||
|
||||
@ -351,7 +354,7 @@ export interface Create<T = any> extends Generic {
|
||||
routing?: string;
|
||||
timeout?: string;
|
||||
version?: number;
|
||||
version_type?: 'internal' | 'external' | 'external_gte' | 'force';
|
||||
version_type?: 'internal' | 'external' | 'external_gte';
|
||||
pipeline?: string;
|
||||
body: T;
|
||||
}
|
||||
@ -536,7 +539,7 @@ export interface Index<T = any> extends Generic {
|
||||
routing?: string;
|
||||
timeout?: string;
|
||||
version?: number;
|
||||
version_type?: 'internal' | 'external' | 'external_gte' | 'force';
|
||||
version_type?: 'internal' | 'external' | 'external_gte';
|
||||
if_seq_no?: number;
|
||||
if_primary_term?: number;
|
||||
pipeline?: string;
|
||||
@ -1367,6 +1370,10 @@ export interface CcrGetAutoFollowPattern extends Generic {
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export interface CcrPauseAutoFollowPattern extends Generic {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface CcrPauseFollow extends Generic {
|
||||
index: string;
|
||||
}
|
||||
@ -1376,6 +1383,10 @@ export interface CcrPutAutoFollowPattern<T = any> extends Generic {
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface CcrResumeAutoFollowPattern extends Generic {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface CcrResumeFollow<T = any> extends Generic {
|
||||
index: string;
|
||||
body?: T;
|
||||
@ -1388,53 +1399,27 @@ export interface CcrUnfollow extends Generic {
|
||||
index: string;
|
||||
}
|
||||
|
||||
export interface DataFrameDeleteDataFrameTransform extends Generic {
|
||||
transform_id: string;
|
||||
force?: boolean;
|
||||
export interface EnrichDeletePolicy extends Generic {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface DataFrameGetDataFrameTransform extends Generic {
|
||||
transform_id?: string;
|
||||
from?: number;
|
||||
size?: number;
|
||||
allow_no_match?: boolean;
|
||||
}
|
||||
|
||||
export interface DataFrameGetDataFrameTransformStats extends Generic {
|
||||
transform_id: string;
|
||||
from?: number;
|
||||
size?: number;
|
||||
allow_no_match?: boolean;
|
||||
}
|
||||
|
||||
export interface DataFramePreviewDataFrameTransform<T = any> extends Generic {
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface DataFramePutDataFrameTransform<T = any> extends Generic {
|
||||
transform_id: string;
|
||||
defer_validation?: boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface DataFrameStartDataFrameTransform extends Generic {
|
||||
transform_id: string;
|
||||
timeout?: string;
|
||||
}
|
||||
|
||||
export interface DataFrameStopDataFrameTransform extends Generic {
|
||||
transform_id: string;
|
||||
export interface EnrichExecutePolicy extends Generic {
|
||||
name: string;
|
||||
wait_for_completion?: boolean;
|
||||
timeout?: string;
|
||||
allow_no_match?: boolean;
|
||||
}
|
||||
|
||||
export interface DataFrameUpdateDataFrameTransform<T = any> extends Generic {
|
||||
transform_id: string;
|
||||
defer_validation?: boolean;
|
||||
export interface EnrichGetPolicy extends Generic {
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export interface EnrichPutPolicy<T = any> extends Generic {
|
||||
name: string;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface EnrichStats extends Generic {
|
||||
}
|
||||
|
||||
export interface GraphExplore<T = any> extends Generic {
|
||||
index: string | string[];
|
||||
type?: string | string[];
|
||||
@ -1998,6 +1983,7 @@ export interface SecurityGetApiKey extends Generic {
|
||||
name?: string;
|
||||
username?: string;
|
||||
realm_name?: string;
|
||||
owner?: boolean;
|
||||
}
|
||||
|
||||
export interface SecurityGetBuiltinPrivileges extends Generic {
|
||||
@ -2071,8 +2057,14 @@ export interface SlmExecuteLifecycle extends Generic {
|
||||
policy_id: string;
|
||||
}
|
||||
|
||||
export interface SlmExecuteRetention extends Generic {
|
||||
}
|
||||
|
||||
export interface SlmGetLifecycle extends Generic {
|
||||
policy_id?: string;
|
||||
policy_id?: string | string[];
|
||||
}
|
||||
|
||||
export interface SlmGetStats extends Generic {
|
||||
}
|
||||
|
||||
export interface SlmPutLifecycle<T = any> extends Generic {
|
||||
@ -2096,6 +2088,53 @@ export interface SqlTranslate<T = any> extends Generic {
|
||||
export interface SslCertificates extends Generic {
|
||||
}
|
||||
|
||||
export interface TransformDeleteTransform extends Generic {
|
||||
transform_id: string;
|
||||
force?: boolean;
|
||||
}
|
||||
|
||||
export interface TransformGetTransform extends Generic {
|
||||
transform_id?: string;
|
||||
from?: number;
|
||||
size?: number;
|
||||
allow_no_match?: boolean;
|
||||
}
|
||||
|
||||
export interface TransformGetTransformStats extends Generic {
|
||||
transform_id: string;
|
||||
from?: number;
|
||||
size?: number;
|
||||
allow_no_match?: boolean;
|
||||
}
|
||||
|
||||
export interface TransformPreviewTransform<T = any> extends Generic {
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface TransformPutTransform<T = any> extends Generic {
|
||||
transform_id: string;
|
||||
defer_validation?: boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface TransformStartTransform extends Generic {
|
||||
transform_id: string;
|
||||
timeout?: string;
|
||||
}
|
||||
|
||||
export interface TransformStopTransform extends Generic {
|
||||
transform_id: string;
|
||||
wait_for_completion?: boolean;
|
||||
timeout?: string;
|
||||
allow_no_match?: boolean;
|
||||
}
|
||||
|
||||
export interface TransformUpdateTransform<T = any> extends Generic {
|
||||
transform_id: string;
|
||||
defer_validation?: boolean;
|
||||
body: T;
|
||||
}
|
||||
|
||||
export interface WatcherAckWatch extends Generic {
|
||||
watch_id: string;
|
||||
action_id?: string | string[];
|
||||
|
||||
@ -1,19 +1,25 @@
|
||||
[[auth-reference]]
|
||||
== Authentication
|
||||
|
||||
This document contains code snippets to show you how to connect to various Elasticsearch providers.
|
||||
This document contains code snippets to show you how to connect to various {es}
|
||||
providers.
|
||||
|
||||
|
||||
=== Elastic Cloud
|
||||
|
||||
If you are using https://www.elastic.co/cloud[Elastic Cloud], the client offers a easy way to connect to it via the `cloud` option. +
|
||||
You must pass the Cloud ID that you can find in the cloud console, then your username and password inside the `auth` option.
|
||||
If you are using https://www.elastic.co/cloud[Elastic Cloud], the client offers
|
||||
an easy way to connect to it via the `cloud` option. You must pass the Cloud ID
|
||||
that you can find in the cloud console, then your username and password inside
|
||||
the `auth` option.
|
||||
|
||||
NOTE: When connecting to Elastic Cloud, the client will automatically enable both request and response compression by default, since it yields significant throughput improvements. +
|
||||
Moreover, the client will also set the ssl option `secureProtocol` to `TLSv1_2_method` unless specified otherwise.
|
||||
You can still override this option by configuring them.
|
||||
NOTE: When connecting to Elastic Cloud, the client will automatically enable
|
||||
both request and response compression by default, since it yields significant
|
||||
throughput improvements. Moreover, the client will also set the ssl option
|
||||
`secureProtocol` to `TLSv1_2_method` unless specified otherwise. You can still
|
||||
override this option by configuring them.
|
||||
|
||||
IMPORTANT: Do not enable sniffing when using Elastic Cloud, since the nodes are behind a load balancer, Elastic Cloud will take care of everything for you.
|
||||
IMPORTANT: Do not enable sniffing when using Elastic Cloud, since the nodes are
|
||||
behind a load balancer, Elastic Cloud will take care of everything for you.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -29,9 +35,11 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== Basic authentication
|
||||
|
||||
You can provide your credentials by passing the `username` and `password` parameters via the `auth` option.
|
||||
You can provide your credentials by passing the `username` and `password`
|
||||
parameters via the `auth` option.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -45,6 +53,7 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
Otherwise, you can provide your credentials in the node(s) URL.
|
||||
|
||||
[source,js]
|
||||
@ -55,10 +64,15 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== ApiKey authentication
|
||||
|
||||
You can use the https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[ApiKey] authentication by passing the `apiKey` parameter via the `auth` option. +
|
||||
The `apiKey` parameter can be either a base64 encoded string or an object with the values that you can obtain from the https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[create api key endpoint].
|
||||
You can use the
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[ApiKey]
|
||||
authentication by passing the `apiKey` parameter via the `auth` option. The
|
||||
`apiKey` parameter can be either a base64 encoded string or an object with the
|
||||
values that you can obtain from the
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[create api key endpoint].
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -88,7 +102,14 @@ const client = new Client({
|
||||
|
||||
=== SSL configuration
|
||||
|
||||
Without any additional configuration you can specify `https://` node urls, but the certificates used to sign these requests will not verified (`rejectUnauthorized: false`). To turn on certificate verification you must specify an `ssl` object either in the top level config or in each host config object and set `rejectUnauthorized: true`. The ssl config object can contain many of the same configuration options that https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[tls.connect()] accepts.
|
||||
Without any additional configuration you can specify `https://` node urls, but
|
||||
the certificates used to sign these requests will not verified
|
||||
(`rejectUnauthorized: false`). To turn on certificate verification, you must
|
||||
specify an `ssl` object either in the top level config or in each host config
|
||||
object and set `rejectUnauthorized: true`. The ssl config object can contain
|
||||
many of the same configuration options that
|
||||
https://nodejs.org/api/tls.html#tls_tls_connect_options_callback[tls.connect()]
|
||||
accepts.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,27 +1,44 @@
|
||||
[[breaking-changes]]
|
||||
== Breaking changes coming from the old client
|
||||
|
||||
If you were already using the previous version of this client --i.e. the one you used to install with `npm install elasticsearch`-- you will encounter some breaking changes.
|
||||
If you were already using the previous version of this client – the one you used
|
||||
to install with `npm install elasticsearch` – you will encounter some breaking
|
||||
changes.
|
||||
|
||||
|
||||
=== Don’t panic!
|
||||
|
||||
Every breaking change was carefully weighed, and each is justified. Furthermore, the new codebase has been rewritten with modern JavaScript and has been carefully designed to be easy to maintain.
|
||||
Every breaking change was carefully weighed, and each is justified. Furthermore,
|
||||
the new codebase has been rewritten with modern JavaScript and has been
|
||||
carefully designed to be easy to maintain.
|
||||
|
||||
|
||||
=== Breaking changes
|
||||
|
||||
* Minimum supported version of Node.js is `v8`.
|
||||
|
||||
* Everything has been rewritten using ES6 classes to help users extend the defaults more easily.
|
||||
* Everything has been rewritten using ES6 classes to help users extend the
|
||||
defaults more easily.
|
||||
|
||||
* There is no longer an integrated logger. The client now is an event emitter that emits the following events: `request`, `response`, and `error`.
|
||||
* There is no longer an integrated logger. The client now is an event emitter
|
||||
that emits the following events: `request`, `response`, and `error`.
|
||||
|
||||
* The code is no longer shipped with all the versions of the API, but only that of the package’s major version, This means that if you are using Elasticsearch `v6`, you will be required to install `@elastic/elasticsearch@6`, and so on.
|
||||
* The code is no longer shipped with all the versions of the API, but only that
|
||||
of the package’s major version. This means that if you are using {es} `v6`, you
|
||||
are required to install `@elastic/elasticsearch@6`, and so on.
|
||||
|
||||
* The internals are completely different, so if you used to tweak them a lot, you will need to refactor your code. The public API should be almost the same.
|
||||
* The internals are completely different, so if you used to tweak them a lot,
|
||||
you will need to refactor your code. The public API should be almost the same.
|
||||
|
||||
* No more browser support, for that will be distributed via another module, `@elastic/elasticsearch-browser`. This module is intended for Node.js only.
|
||||
* There is no longer browser support, for that will be distributed via another
|
||||
module: `@elastic/elasticsearch-browser`. This module is intended for Node.js
|
||||
only.
|
||||
|
||||
* The returned value of an API call will no longer be the `body`, `statusCode`,
|
||||
and `headers` for callbacks, and only the `body` for promises. The new returned
|
||||
value will be a unique object containing the `body`, `statusCode`, `headers`,
|
||||
`warnings`, and `meta`, for both callback and promises.
|
||||
|
||||
* The returned value of an API call will no longer be the `body`, `statusCode`, and `headers` for callbacks and just the `body` for promises. The new returned value will be a unique object containing the `body`, `statusCode`, `headers`, `warnings`, and `meta`, for both callback and promises.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -53,14 +70,20 @@ client.search({
|
||||
----
|
||||
|
||||
|
||||
* Errors: there is no longer a custom error class for every HTTP status code (such as `BadRequest` or `NotFound`). There is instead a single `ResponseError`. Each error class has been renamed, and now each is suffixed with `Error` at the end.
|
||||
* Errors: there is no longer a custom error class for every HTTP status code
|
||||
(such as `BadRequest` or `NotFound`). There is instead a single `ResponseError`.
|
||||
Every error class has been renamed, and now each is suffixed with `Error` at the
|
||||
end.
|
||||
|
||||
* Errors that have been removed: `RequestTypeError`, `Generic`, and all the status code specific errors (such as `BadRequest` or `NotFound`).
|
||||
* Removed errors: `RequestTypeError`, `Generic`, and all the status code
|
||||
specific errors (such as `BadRequest` or `NotFound`).
|
||||
|
||||
* Errors that have been added: `ConfigurationError` (in case of bad configurations) and `ResponseError`, which contains all the data you may need to handle the specific error, such as `statusCode`, `headers`, `body`, and `message`.
|
||||
* Added errors: `ConfigurationError` (in case of bad configurations) and
|
||||
`ResponseError` that contains all the data you may need to handle the specific
|
||||
error, such as `statusCode`, `headers`, `body`, and `message`.
|
||||
|
||||
|
||||
* Errors that has been renamed:
|
||||
* Renamed errors:
|
||||
|
||||
** `RequestTimeout` (408 statusCode) => `TimeoutError`
|
||||
** `ConnectionFault` => `ConnectionError`
|
||||
@ -68,9 +91,12 @@ client.search({
|
||||
** `Serialization` => `SerializationError`
|
||||
** `Serialization` => `DeserializationError`
|
||||
|
||||
* You must specify the port number in the configuration. In the previous version you can specify the host and port in a variety of ways, with the new client there is only one via the `node` parameter.
|
||||
* You must specify the port number in the configuration. In the previous
|
||||
version, you can specify the host and port in a variety of ways. With the new
|
||||
client, there is only one way to do it, via the `node` parameter.
|
||||
|
||||
* The `plugins` option has been removed, if you want to extend the client now you should use the `client.extend` API.
|
||||
* The `plugins` option has been removed. If you want to extend the client now,
|
||||
you should use the `client.extend` API.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -84,7 +110,10 @@ const client = new Client({ ... })
|
||||
client.extend(...)
|
||||
----
|
||||
|
||||
* There is a clear distinction between the API related parameters and the client related configurations, the parameters `ignore`, `headers`, `requestTimeout` and `maxRetries` are no longer part of the API object, and you should specify them in a second option object.
|
||||
* There is a clear distinction between the API related parameters and the client
|
||||
related configurations. The parameters `ignore`, `headers`, `requestTimeout` and
|
||||
`maxRetries` are no longer part of the API object and you need to specify them
|
||||
in a second option object.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -121,7 +150,11 @@ client.search({
|
||||
})
|
||||
----
|
||||
|
||||
* The `transport.request` method will no longer accept the `query` key, but the `querystring` key instead (which can be a string or an object), furthermore, you need to send a bulk-like request, instead of the `body` key, you should use the `bulkBody` key. Also in this method, the client specific parameters should be passed as a second object.
|
||||
* The `transport.request` method no longer accepts the `query` key. Use the
|
||||
`querystring` key instead (which can be a string or an object). You also
|
||||
need to send a bulk-like request instead of the `body` key, use the `bulkBody`
|
||||
key. In this method, the client specific parameters should be passed as a second
|
||||
object.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -168,7 +201,8 @@ client.transport.request({
|
||||
|
||||
=== Talk is cheap. Show me the code.
|
||||
|
||||
Following you will find a snippet of code with the old client, followed by the same code logic, but with the new client.
|
||||
You can find a code snippet with the old client below followed by the same code
|
||||
logic but with the new client.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,14 +1,23 @@
|
||||
[[child-client]]
|
||||
== Creating a child client
|
||||
|
||||
There are some use cases where you may need multiple instances of the client. You can easily do that by calling `new Client()` as many times as you need, but you will lose all the benefits of using one single client, such as the long living connections and the connection pool handling. +
|
||||
To avoid this problem the client offers a `child` API, which returns a new client instance that shares the connection pool with the parent client. +
|
||||
There are some use cases where you may need multiple instances of the client.
|
||||
You can easily do that by calling `new Client()` as many times as you need, but
|
||||
you will lose all the benefits of using one single client, such as the long
|
||||
living connections and the connection pool handling. To avoid this problem the
|
||||
client offers a `child` API, which returns a new client instance that shares the
|
||||
connection pool with the parent client.
|
||||
|
||||
NOTE: The event emitter is shared between the parent and the child(ren), and if you extend the parent client, the child client will have the same extensions, while if the child client adds an extension, the parent client will not be extended.
|
||||
NOTE: The event emitter is shared between the parent and the child(ren). If you
|
||||
extend the parent client, the child client will have the same extensions, while
|
||||
if the child client adds an extension, the parent client will not be extended.
|
||||
|
||||
You can pass to the `child` every client option you would pass to a normal client, but the connection pool specific options (`ssl`, `agent`, `pingTimeout`, `Connection`, and `resurrectStrategy`).
|
||||
You can pass to the `child` every client option you would pass to a normal
|
||||
client, but the connection pool specific options (`ssl`, `agent`, `pingTimeout`,
|
||||
`Connection`, and `resurrectStrategy`).
|
||||
|
||||
CAUTION: If you call `close` in any of the parent/child clients, every client will be closed.
|
||||
CAUTION: If you call `close` in any of the parent/child clients, every client
|
||||
will be closed.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,7 +1,9 @@
|
||||
[[client-configuration]]
|
||||
== Client configuration
|
||||
|
||||
The client is designed to be easily configured as you see fit for your needs, following you can see all the possible basic options that you can use to configure it.
|
||||
The client is designed to be easily configured for your needs. In the following
|
||||
section, you can see the possible basic options that you can use to configure
|
||||
it.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -15,7 +17,9 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== Basic options
|
||||
|
||||
[cols=2*]
|
||||
|===
|
||||
|`node` or `nodes`
|
||||
@ -25,7 +29,7 @@ It can be a single string or an array of strings:
|
||||
----
|
||||
node: 'http://localhost:9200'
|
||||
----
|
||||
Or it can be an object (or an array of objects) that represents the node
|
||||
Or it can be an object (or an array of objects) that represents the node:
|
||||
[source,js]
|
||||
----
|
||||
node: {
|
||||
@ -44,8 +48,10 @@ node: {
|
||||
----
|
||||
|
||||
|`auth`
|
||||
a|Your authentication data. You can use both Basic authentication and https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[ApiKey]. +
|
||||
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication] for more details. +
|
||||
a|Your authentication data. You can use both basic authentication and
|
||||
{ref}/security-api-create-api-key.html[ApiKey]. +
|
||||
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
for more details. +
|
||||
_Default:_ `null`
|
||||
|
||||
Basic authentication:
|
||||
@ -56,7 +62,7 @@ auth: {
|
||||
password: 'changeme'
|
||||
}
|
||||
----
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/7.x/security-api-create-api-key.html[ApiKey] authentication:
|
||||
{ref}/security-api-create-api-key.html[ApiKey] authentication:
|
||||
[source,js]
|
||||
----
|
||||
auth: {
|
||||
@ -112,7 +118,8 @@ _Default:_ `false`
|
||||
_Default:_ `null`
|
||||
|
||||
|`agent`
|
||||
a|`http.AgentOptions, function` - http agent https://nodejs.org/api/http.html#http_new_agent_options[options], or a function that returns an actual http agent instance. +
|
||||
a|`http.AgentOptions, function` - http agent https://nodejs.org/api/http.html#http_new_agent_options[options],
|
||||
or a function that returns an actual http agent instance. +
|
||||
_Default:_ `null`
|
||||
[source,js]
|
||||
----
|
||||
@ -157,7 +164,8 @@ function nodeSelector (connections) {
|
||||
----
|
||||
|
||||
|`generateRequestId`
|
||||
a|`function` - function to generate the request id for every request, it takes two parameters, the request parameters and options. +
|
||||
a|`function` - function to generate the request id for every request, it takes
|
||||
two parameters, the request parameters and options. +
|
||||
By default it generates an incremental integer for every request. +
|
||||
_Custom function example:_
|
||||
[source,js]
|
||||
@ -173,12 +181,19 @@ function generateRequestId (params, options) {
|
||||
|`string` - The name to identify the client instance in the events. +
|
||||
_Default:_ `elasticsearch-js`
|
||||
|
||||
|`opaqueIdPrefix`
|
||||
|`string` - A string that will be use to prefix any `X-Opaque-Id` header. +
|
||||
See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/observability.html#_x-opaque-id_support[`X-Opaque-Id` support] for more details. +
|
||||
_Default:_ `null`
|
||||
|
||||
|`headers`
|
||||
|`object` - A set of custom headers to send in every request. +
|
||||
_Default:_ `{}`
|
||||
|
||||
|`cloud`
|
||||
a|`object` - Custom configuration for connecting to https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication] for more details. +
|
||||
a|`object` - Custom configuration for connecting to
|
||||
https://cloud.elastic.co[Elastic Cloud]. See https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/auth-reference.html[Authentication]
|
||||
for more details. +
|
||||
_Default:_ `null` +
|
||||
_Cloud configuration example:_
|
||||
[source,js]
|
||||
@ -196,17 +211,23 @@ const client = new Client({
|
||||
|
||||
|===
|
||||
|
||||
|
||||
=== Advanced configuration
|
||||
If you need to customize the client behavior heavily, you are in the right place! +
|
||||
The client allows you to customize the following internals:
|
||||
|
||||
If you need to customize the client behavior heavily, you are in the right
|
||||
place! The client allows you to customize the following internals:
|
||||
|
||||
* `Transport` class
|
||||
* `ConnectionPool` class
|
||||
* `Connection` class
|
||||
* `Serializer` class
|
||||
|
||||
|
||||
=== `Transport`
|
||||
This class is responsible to perform the request to Elasticsearch and handling errors, it also handle the sniffing.
|
||||
|
||||
This class is responsible for performing the request to {es} and handling
|
||||
errors, it also handles the sniffing.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client, Transport } = require('@elastic/elasticsearch')
|
||||
@ -222,20 +243,26 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
Sometimes you just need to inject a little snippet of your code and then continue to use the usual client code, in such case, you should call `super.method`.
|
||||
Sometimes you need to inject a small snippet of your code and then continue to
|
||||
use the usual client code. In such cases, call `super.method`:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
class MyTransport extends Transport {
|
||||
request (params, options, callback) {
|
||||
// your code
|
||||
super.request(params, options, callback)
|
||||
return super.request(params, options, callback)
|
||||
}
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
=== `ConnectionPool`
|
||||
This class is responsible for keeping in memory all the Elasticsearch Connection that we are using, there is a single Connection for every node. +
|
||||
Moreover, the connection pool will handle the resurrection strategies and the updates of the pool.
|
||||
|
||||
This class is responsible for keeping in memory all the {es} Connection that we
|
||||
are using. There is a single Connection for every node. The connection pool
|
||||
handles the resurrection strategies and the updates of the pool.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client, ConnectionPool } = require('@elastic/elasticsearch')
|
||||
@ -252,8 +279,14 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== `Connection`
|
||||
This class represents a single Node, it holds every information we have on the node, such as roles, id, URL, custom headers and so on. The actual HTTP request is performed here, this means that if you want to swap the default HTTP client (Node.js core), you should override this class `request` method.
|
||||
|
||||
This class represents a single node, it holds every information we have on the
|
||||
node, such as roles, id, URL, custom headers and so on. The actual HTTP request
|
||||
is performed here, this means that if you want to swap the default HTTP client
|
||||
(Node.js core), you should override the `request` method of this class.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client, Connection } = require('@elastic/elasticsearch')
|
||||
@ -269,13 +302,16 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
=== `Serializer`
|
||||
This class is responsible of the serialization of every request, it offers the following methods:
|
||||
|
||||
* `serialize(object: any): string;`, serializes request objects
|
||||
* `deserialize(json: string): any;`, deserializes response strings
|
||||
* `ndserialize(array: any[]): string;`, serializes bulk request objects
|
||||
* `qserialize(object: any): string;`, serializes request query parameters
|
||||
=== `Serializer`
|
||||
|
||||
This class is responsible for the serialization of every request, it offers the
|
||||
following methods:
|
||||
|
||||
* `serialize(object: any): string;` serializes request objects.
|
||||
* `deserialize(json: string): any;` deserializes response strings.
|
||||
* `ndserialize(array: any[]): string;` serializes bulk request objects.
|
||||
* `qserialize(object: any): string;` serializes request query parameters.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,7 +1,8 @@
|
||||
[[as_stream_examples]]
|
||||
== asStream
|
||||
|
||||
Instead of getting the parsed body back, you will get the raw Node.js stream of data.
|
||||
Instead of getting the parsed body back, you will get the raw Node.js stream of
|
||||
data.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -76,7 +77,8 @@ async function run () {
|
||||
run().catch(console.log)
|
||||
----
|
||||
|
||||
TIP: This can be useful if you need to pipe the Elasticsearch's response to a proxy, or send it directly to another source.
|
||||
TIP: This can be useful if you need to pipe the {es}'s response to a proxy, or
|
||||
send it directly to another source.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
[[bulk_examples]]
|
||||
== Bulk
|
||||
|
||||
The `bulk` API makes it possible to perform many index/delete operations in a single API call. +
|
||||
This can greatly increase the indexing speed.
|
||||
The `bulk` API makes it possible to perform many index/delete operations in a
|
||||
single API call. This can greatly increase the indexing speed.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,8 +1,9 @@
|
||||
[[get_examples]]
|
||||
== Get
|
||||
|
||||
The get API allows to get a typed JSON document from the index based on its id. +
|
||||
The following example gets a JSON document from an index called `game-of-thrones`, under a type called `_doc`, with id valued `'1'`.
|
||||
The get API allows to get a typed JSON document from the index based on its id.
|
||||
The following example gets a JSON document from an index called
|
||||
`game-of-thrones`, under a type called `_doc`, with id valued `'1'`.
|
||||
|
||||
[source,js]
|
||||
---------
|
||||
|
||||
@ -7,6 +7,10 @@ Following you can find some examples on how to use the client.
|
||||
* Executing a <<bulk_examples,bulk>> request;
|
||||
* Executing a <<exists_examples,exists>> request;
|
||||
* Executing a <<get_examples,get>> request;
|
||||
* Executing a <<sql_query_examples,sql.query>> request;
|
||||
* Executing a <<update_examples,update>> request;
|
||||
* Executing a <<update_by_query_examples,update by query>> request;
|
||||
* Executing a <<reindex_examples,reindex>> request;
|
||||
* Use of the <<ignore_examples,ignore>> parameter;
|
||||
* Executing a <<msearch_examples,msearch>> request;
|
||||
* How do I <<scroll_examples,scroll>>?
|
||||
@ -26,3 +30,7 @@ include::search.asciidoc[]
|
||||
include::suggest.asciidoc[]
|
||||
include::transport.request.asciidoc[]
|
||||
include::typescript.asciidoc[]
|
||||
include::sql.query.asciidoc[]
|
||||
include::update.asciidoc[]
|
||||
include::update_by_query.asciidoc[]
|
||||
include::reindex.asciidoc[]
|
||||
|
||||
@ -1,7 +1,8 @@
|
||||
[[msearch_examples]]
|
||||
== MSearch
|
||||
|
||||
The multi search API allows to execute several search requests within the same API.
|
||||
The multi search API allows to execute several search requests within the same
|
||||
API.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
75
docs/examples/reindex.asciidoc
Normal file
75
docs/examples/reindex.asciidoc
Normal file
@ -0,0 +1,75 @@
|
||||
[[reindex_examples]]
|
||||
== Reindex
|
||||
|
||||
The `reindex` API extracts the document source from the source index and indexes the documents into the destination index. You can copy all documents to the destination index, reindex a subset of the documents or update the source before to reindex it.
|
||||
|
||||
In the following example we have a `game-of-thrones` index which contains different quotes of various characters, we want to create a new index only for the house Stark and remove the `house` field from the document source.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
house: 'stark'
|
||||
}
|
||||
})
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Arya Stark',
|
||||
quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.',
|
||||
house: 'stark'
|
||||
}
|
||||
})
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A Lannister always pays his debts.',
|
||||
house: 'lannister'
|
||||
}
|
||||
})
|
||||
|
||||
await client.reindex({
|
||||
waitForCompletion: true,
|
||||
refresh: true,
|
||||
body: {
|
||||
source: {
|
||||
index: 'game-of-thrones',
|
||||
query: {
|
||||
match: { character: 'stark' }
|
||||
}
|
||||
},
|
||||
dest: {
|
||||
index: 'stark-index'
|
||||
},
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source.remove("house")'
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'stark-index',
|
||||
body: {
|
||||
query: { match_all: {} }
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
@ -1,13 +1,23 @@
|
||||
[[scroll_examples]]
|
||||
== Scroll
|
||||
|
||||
While a search request returns a single “page” of results, the scroll API can be used to retrieve large numbers of results (or even all results) from a single search request, in much the same way as you would use a cursor on a traditional database.
|
||||
While a search request returns a single “page” of results, the scroll API can be
|
||||
used to retrieve large numbers of results (or even all results) from a single
|
||||
search request, in much the same way as you would use a cursor on a traditional
|
||||
database.
|
||||
|
||||
Scrolling is not intended for real time user requests, but rather for processing large amounts of data, e.g. in order to reindex the contents of one index into a new index with a different configuration.
|
||||
Scrolling is not intended for real time user requests, but rather for processing
|
||||
large amounts of data, e.g. in order to reindex the contents of one index into a
|
||||
new index with a different configuration.
|
||||
|
||||
NOTE: The results that are returned from a scroll request reflect the state of the index at the time that the initial search request was made, like a snapshot in time. Subsequent changes to documents (index, update or delete) will only affect later search requests.
|
||||
NOTE: The results that are returned from a scroll request reflect the state of
|
||||
the index at the time that the initial search request was made, like a snapshot
|
||||
in time. Subsequent changes to documents (index, update or delete) will only
|
||||
affect later search requests.
|
||||
|
||||
In order to use scrolling, the initial search request should specify the scroll parameter in the query string, which tells Elasticsearch how long it should keep the “search context” alive.
|
||||
In order to use scrolling, the initial search request should specify the scroll
|
||||
parameter in the query string, which tells Elasticsearch how long it should keep
|
||||
the “search context” alive.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -100,7 +110,8 @@ async function run () {
|
||||
run().catch(console.log)
|
||||
----
|
||||
|
||||
Another cool usage of the `scroll` API can be done with Node.js ≥ 10, by using async iteration!
|
||||
Another cool usage of the `scroll` API can be done with Node.js ≥ 10, by using
|
||||
async iteration!
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,8 +1,11 @@
|
||||
[[search_examples]]
|
||||
== Search
|
||||
|
||||
The `search` API allows you to execute a search query and get back search hits that match the query. +
|
||||
The query can either be provided using a simple https://www.elastic.co/guide/en/elasticsearch/reference/6.6/search-uri-request.html[query string as a parameter], or using a https://www.elastic.co/guide/en/elasticsearch/reference/6.6/search-request-body.html[request body].
|
||||
The `search` API allows you to execute a search query and get back search hits
|
||||
that match the query. The query can either be provided using a simple
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/6.6/search-uri-request.html[query string as a parameter],
|
||||
or using a
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/6.6/search-request-body.html[request body].
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
64
docs/examples/sql.asciidoc
Normal file
64
docs/examples/sql.asciidoc
Normal file
@ -0,0 +1,64 @@
|
||||
[[sql_examples]]
|
||||
== SQL
|
||||
|
||||
Elasticsearch SQL is an X-Pack component that allows SQL-like queries to be executed in real-time against Elasticsearch. Whether using the REST interface, command-line or JDBC, any client can use SQL to search and aggregate data natively inside Elasticsearch. One can think of Elasticsearch SQL as a translator, one that understands both SQL and Elasticsearch and makes it easy to read and process data in real-time, at scale by leveraging Elasticsearch capabilities.
|
||||
|
||||
In the following example we will search all the documents that has the field `house` equals to `stark`, log the result with the tabular view and then manipulate the result to obtain an object easy to navigate.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
house: 'stark'
|
||||
}
|
||||
})
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Arya Stark',
|
||||
quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.',
|
||||
house: 'stark'
|
||||
}
|
||||
})
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A Lannister always pays his debts.',
|
||||
house: 'lannister'
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.sql.query({
|
||||
body: {
|
||||
query: "SELECT * FROM \"game-of-thrones\" WHERE house='stark'"
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
|
||||
const data = body.rows.map(row => {
|
||||
const obj = {}
|
||||
for (var i = 0; i < row.length; i++) {
|
||||
obj[body.columns[i].name] = row[i]
|
||||
}
|
||||
return obj
|
||||
})
|
||||
|
||||
console.log(data)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
64
docs/examples/sql.query.asciidoc
Normal file
64
docs/examples/sql.query.asciidoc
Normal file
@ -0,0 +1,64 @@
|
||||
[[sql_query_examples]]
|
||||
== SQL
|
||||
|
||||
Elasticsearch SQL is an X-Pack component that allows SQL-like queries to be executed in real-time against Elasticsearch. Whether using the REST interface, command-line or JDBC, any client can use SQL to search and aggregate data natively inside Elasticsearch. One can think of Elasticsearch SQL as a translator, one that understands both SQL and Elasticsearch and makes it easy to read and process data in real-time, at scale by leveraging Elasticsearch capabilities.
|
||||
|
||||
In the following example we will search all the documents that has the field `house` equals to `stark`, log the result with the tabular view and then manipulate the result to obtain an object easy to navigate.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
house: 'stark'
|
||||
}
|
||||
})
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Arya Stark',
|
||||
quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.',
|
||||
house: 'stark'
|
||||
}
|
||||
})
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A Lannister always pays his debts.',
|
||||
house: 'lannister'
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.sql.query({
|
||||
body: {
|
||||
query: "SELECT * FROM \"game-of-thrones\" WHERE house='stark'"
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
|
||||
const data = body.rows.map(row => {
|
||||
const obj = {}
|
||||
for (var i = 0; i < row.length; i++) {
|
||||
obj[body.columns[i].name] = row[i]
|
||||
}
|
||||
return obj
|
||||
})
|
||||
|
||||
console.log(data)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
----
|
||||
@ -1,10 +1,11 @@
|
||||
[[suggest_examples]]
|
||||
== Suggest
|
||||
|
||||
The suggest feature suggests similar looking terms based on a provided text by using a suggester. _Parts of the suggest feature are still under development._
|
||||
The suggest feature suggests similar looking terms based on a provided text by
|
||||
using a suggester. _Parts of the suggest feature are still under development._
|
||||
|
||||
The suggest request part is defined alongside the query part in a `search` request. +
|
||||
If the query part is left out, only suggestions are returned.
|
||||
The suggest request part is defined alongside the query part in a `search`
|
||||
request. If the query part is left out, only suggestions are returned.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,12 +1,19 @@
|
||||
[[transport_request_examples]]
|
||||
== transport.request
|
||||
|
||||
It can happen that you need to communicate with Elasticsearch by using an API that is not supported by the client, to mitigate this issue you can directly call `client.transport.request`, which is the internal utility that the client uses to communicate with Elasticsearch when you use an API method.
|
||||
It can happen that you need to communicate with {es} by using an API that is not
|
||||
supported by the client, to mitigate this issue you can directly call
|
||||
`client.transport.request`, which is the internal utility that the client uses
|
||||
to communicate with {es} when you use an API method.
|
||||
|
||||
NOTE: When using the `transport.request` method you must provide all the parameters needed to perform an HTTP call, such as `method`, `path`, `querystring`, and `body`.
|
||||
NOTE: When using the `transport.request` method you must provide all the
|
||||
parameters needed to perform an HTTP call, such as `method`, `path`,
|
||||
`querystring`, and `body`.
|
||||
|
||||
|
||||
TIP: If you find yourself use this method too often, take in consideration the use of `client.extend`, which will make your code look cleaner and easier to maintain.
|
||||
TIP: If you find yourself use this method too often, take in consideration the
|
||||
use of `client.extend`, which will make your code look cleaner and easier to
|
||||
maintain.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,9 +1,11 @@
|
||||
[[typescript_examples]]
|
||||
== Typescript
|
||||
|
||||
The client offers a first-class support for TypeScript, since it ships the type definitions for every exposed API.
|
||||
The client offers a first-class support for TypeScript, since it ships the type
|
||||
definitions for every exposed API.
|
||||
|
||||
NOTE: If you are using TypeScript you will be required to use _snake_case_ style to define the API parameters instead of _camelCase_.
|
||||
NOTE: If you are using TypeScript you will be required to use _snake_case_ style
|
||||
to define the API parameters instead of _camelCase_.
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
|
||||
59
docs/examples/update-by-query.asciidoc
Normal file
59
docs/examples/update-by-query.asciidoc
Normal file
@ -0,0 +1,59 @@
|
||||
[[update_by_query_examples]]
|
||||
== Update By Query
|
||||
|
||||
The simplest usage of _update_by_query just performs an update on every document in the index without changing the source. This is useful to pick up a new property or some other online mapping change.
|
||||
|
||||
[source,js]
|
||||
---------
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
})
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
character: 'Arya Stark',
|
||||
quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.'
|
||||
}
|
||||
})
|
||||
|
||||
await client.updateByQuery({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source["house"] = "stark"'
|
||||
},
|
||||
query: {
|
||||
match: {
|
||||
character: 'stark'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: { match_all: {} }
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
---------
|
||||
92
docs/examples/update.asciidoc
Normal file
92
docs/examples/update.asciidoc
Normal file
@ -0,0 +1,92 @@
|
||||
[[update_examples]]
|
||||
== Update
|
||||
|
||||
The update API allows updates of a specific document using the given script. +
|
||||
In the following example, we will index a document that also tracks how many times a character has said the given quote, and then we will update the `times` field.
|
||||
|
||||
[source,js]
|
||||
---------
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
times: 0
|
||||
}
|
||||
})
|
||||
|
||||
await client.update({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source.times++'
|
||||
// you can also use parameters
|
||||
// source: 'ctx._source.times += params.count',
|
||||
// params: { count: 1 }
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.get({
|
||||
index: 'game-of-thrones',
|
||||
id: '1'
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
---------
|
||||
|
||||
With the update API, you can also run a partial update of a document.
|
||||
|
||||
[source,js]
|
||||
---------
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.',
|
||||
isAlive: true
|
||||
}
|
||||
})
|
||||
|
||||
await client.update({
|
||||
index: 'game-of-thrones',
|
||||
id: '1',
|
||||
body: {
|
||||
doc: {
|
||||
isAlive: false
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.get({
|
||||
index: 'game-of-thrones',
|
||||
id: '1'
|
||||
})
|
||||
|
||||
console.log(body)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
|
||||
---------
|
||||
59
docs/examples/update_by_query.asciidoc
Normal file
59
docs/examples/update_by_query.asciidoc
Normal file
@ -0,0 +1,59 @@
|
||||
[[update_by_query_examples]]
|
||||
== Update By Query
|
||||
|
||||
The simplest usage of _update_by_query just performs an update on every document in the index without changing the source. This is useful to pick up a new property or some other online mapping change.
|
||||
|
||||
[source,js]
|
||||
---------
|
||||
'use strict'
|
||||
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
|
||||
async function run () {
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
}
|
||||
})
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
character: 'Arya Stark',
|
||||
quote: 'A girl is Arya Stark of Winterfell. And I\'m going home.'
|
||||
}
|
||||
})
|
||||
|
||||
await client.updateByQuery({
|
||||
index: 'game-of-thrones',
|
||||
refresh: true,
|
||||
body: {
|
||||
script: {
|
||||
lang: 'painless',
|
||||
source: 'ctx._source["house"] = "stark"'
|
||||
},
|
||||
query: {
|
||||
match: {
|
||||
character: 'stark'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const { body } = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
body: {
|
||||
query: { match_all: {} }
|
||||
}
|
||||
})
|
||||
|
||||
console.log(body.hits.hits)
|
||||
}
|
||||
|
||||
run().catch(console.log)
|
||||
|
||||
---------
|
||||
@ -1,10 +1,12 @@
|
||||
[[extend-client]]
|
||||
== Extend the client
|
||||
|
||||
Sometimes you need to reuse the same logic, or you want to build a custom API to allow you simplify your code. +
|
||||
The easiest way to achieve that is by extending the client.
|
||||
Sometimes you need to reuse the same logic, or you want to build a custom API to
|
||||
allow you simplify your code. The easiest way to achieve that is by extending
|
||||
the client.
|
||||
|
||||
NOTE: If you want to override existing methods, you should specify the `{ force: true }` option.
|
||||
NOTE: If you want to override existing methods, you should specify the
|
||||
`{ force: true }` option.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
|
||||
@ -1,9 +1,11 @@
|
||||
[[introduction]]
|
||||
== Introduction
|
||||
|
||||
The official Node.js client for Elasticsearch.
|
||||
The official Node.js client for {es}.
|
||||
|
||||
|
||||
=== Features
|
||||
|
||||
* One-to-one mapping with REST API.
|
||||
* Generalized, pluggable architecture.
|
||||
* Configurable, automatic discovery of cluster nodes.
|
||||
@ -12,21 +14,27 @@ The official Node.js client for Elasticsearch.
|
||||
* Child client support.
|
||||
* TypeScript support out of the box.
|
||||
|
||||
|
||||
=== Install
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install @elastic/elasticsearch
|
||||
----
|
||||
|
||||
|
||||
=== Compatibility
|
||||
|
||||
The minimum supported version of Node.js is `v8`.
|
||||
|
||||
The library is compatible with all Elasticsearch versions since 5.x, and you should use the same major version of the Elasticsearch instance that you are using.
|
||||
The library is compatible with all {es} versions since 5.x. We recommend you to
|
||||
use the same major version of the client as the {es} instance that you are
|
||||
using.
|
||||
|
||||
|
||||
[%header,cols=2*]
|
||||
|===
|
||||
|Elasticsearch Version
|
||||
|{es} Version
|
||||
|Client Version
|
||||
|
||||
|`master`
|
||||
@ -42,26 +50,33 @@ The library is compatible with all Elasticsearch versions since 5.x, and you sho
|
||||
|`5.x`
|
||||
|===
|
||||
|
||||
To install a specific major of the client, run the following command:
|
||||
To install a specific major version of the client, run the following command:
|
||||
|
||||
----
|
||||
npm install @elastic/elasticsearch@<major>
|
||||
----
|
||||
|
||||
|
||||
==== Browser
|
||||
|
||||
WARNING: There is no official support for the browser environment. It exposes your Elasticsearch instance to everyone, which could lead to security issues.
|
||||
We recommend that you write a lightweight proxy that uses this client instead.
|
||||
WARNING: There is no official support for the browser environment. It exposes
|
||||
your {es} instance to everyone, which could lead to security issues. We
|
||||
recommend you to write a lightweight proxy that uses this client instead.
|
||||
|
||||
|
||||
=== Quick start
|
||||
|
||||
First of all, require the client and initialize it:
|
||||
First of all, require, then initialize the client:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({ node: 'http://localhost:9200' })
|
||||
----
|
||||
|
||||
You can use both the callback-style API and the promise-style API, both behave the same way.
|
||||
|
||||
You can use both the callback API and the promise API, both behave the same way.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// promise API
|
||||
@ -78,7 +93,10 @@ client.search({
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
The returned value of **every** API call is formed as follows:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
{
|
||||
@ -90,7 +108,9 @@ The returned value of **every** API call is formed as follows:
|
||||
}
|
||||
----
|
||||
|
||||
|
||||
Let's see a complete example!
|
||||
|
||||
[source,js]
|
||||
----
|
||||
'use strict'
|
||||
@ -102,7 +122,7 @@ async function run () {
|
||||
// Let's start by indexing some data
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using Elasticsearch ≤ 6
|
||||
// type: '_doc', // uncomment this line if you are using {es} ≤ 6
|
||||
body: {
|
||||
character: 'Ned Stark',
|
||||
quote: 'Winter is coming.'
|
||||
@ -111,7 +131,7 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using Elasticsearch ≤ 6
|
||||
// type: '_doc', // uncomment this line if you are using {es} ≤ 6
|
||||
body: {
|
||||
character: 'Daenerys Targaryen',
|
||||
quote: 'I am the blood of the dragon.'
|
||||
@ -120,21 +140,21 @@ async function run () {
|
||||
|
||||
await client.index({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using Elasticsearch ≤ 6
|
||||
// type: '_doc', // uncomment this line if you are using {es} ≤ 6
|
||||
body: {
|
||||
character: 'Tyrion Lannister',
|
||||
quote: 'A mind needs books like a sword needs a whetstone.'
|
||||
}
|
||||
})
|
||||
|
||||
// here we are forcing an index refresh, otherwise we will not
|
||||
// We need to force an index refresh at this point, otherwise we will not
|
||||
// get any result in the consequent search
|
||||
await client.indices.refresh({ index: 'game-of-thrones' })
|
||||
|
||||
// Let's search!
|
||||
const { body } = await client.search({
|
||||
index: 'game-of-thrones',
|
||||
// type: '_doc', // uncomment this line if you are using Elasticsearch ≤ 6
|
||||
// type: '_doc', // uncomment this line if you are using {es} ≤ 6
|
||||
body: {
|
||||
query: {
|
||||
match: { quote: 'winter' }
|
||||
@ -148,25 +168,32 @@ async function run () {
|
||||
run().catch(console.log)
|
||||
----
|
||||
|
||||
==== Install multiple versions
|
||||
If you are using multiple versions of Elasticsearch, you need to use multiple versions of the client. +
|
||||
In the past, install multiple versions of the same package was not possible, but with `npm v6.9`, you can do that via aliasing.
|
||||
|
||||
The command you must run to install different version of the client is:
|
||||
==== Install multiple versions
|
||||
|
||||
If you are using multiple versions of {es}, you need to use multiple versions of
|
||||
the client as well. In the past, installing multiple versions of the same
|
||||
package was not possible, but with `npm v6.9`, you can do it via aliasing.
|
||||
|
||||
To install different version of the client, run the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install <alias>@npm:@elastic/elasticsearch@<version>
|
||||
----
|
||||
|
||||
So for example if you need to install `7.x` and `6.x`, you will run
|
||||
|
||||
For example, if you need to install `7.x` and `6.x`, run the following commands:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install es6@npm:@elastic/elasticsearch@6
|
||||
npm install es7@npm:@elastic/elasticsearch@7
|
||||
----
|
||||
|
||||
And your `package.json` will look like the following:
|
||||
|
||||
Your `package.json` will look similar to the following example:
|
||||
|
||||
[source,json]
|
||||
----
|
||||
"dependencies": {
|
||||
@ -175,7 +202,8 @@ And your `package.json` will look like the following:
|
||||
}
|
||||
----
|
||||
|
||||
You will require the packages from your code by using the alias you have defined.
|
||||
|
||||
Require the packages from your code by using the alias you have defined.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -189,9 +217,13 @@ client6.info(console.log)
|
||||
client7.info(console.log)
|
||||
----
|
||||
|
||||
Finally, if you want to install the client for the next version of Elasticsearch (the one that lives in Elasticsearch's master branch), you can use the following command:
|
||||
|
||||
Finally, if you want to install the client for the next version of {es} (the one
|
||||
that lives in the {es} master branch), use the following command:
|
||||
|
||||
[source,sh]
|
||||
----
|
||||
npm install esmaster@github:elastic/elasticsearch-js
|
||||
----
|
||||
WARNING: This command will install the master branch of the client, which is not considered stable.
|
||||
WARNING: This command installs the master branch of the client which is not
|
||||
considered stable.
|
||||
@ -1,15 +1,22 @@
|
||||
[[observability]]
|
||||
== Observability
|
||||
|
||||
The client does not provide a default logger, but instead it offers an event emitter interfaces to hook into internal events, such as `request` and `response`.
|
||||
The client does not provide a default logger, but instead it offers an event
|
||||
emitter interfaces to hook into internal events, such as `request` and
|
||||
`response`.
|
||||
|
||||
Correlating those events can be quite hard, especially if your applications have a large codebase with many events happening at the same time.
|
||||
Correlating those events can be quite hard, especially if your applications have
|
||||
a large codebase with many events happening at the same time.
|
||||
|
||||
To help you with this, the client offers you a correlation id system and other features, let's see them in action.
|
||||
To help you with this, the client offers you a correlation id system and other
|
||||
features. Let's see them in action.
|
||||
|
||||
=== Events
|
||||
The client is an event emitter, this means that you can listen for its event and add additional logic to your code, without need to change the client internals or your normal usage. +
|
||||
You can find the events names by access the `events` key of the client.
|
||||
|
||||
The client is an event emitter, this means that you can listen for its event and
|
||||
add additional logic to your code, without need to change the client internals
|
||||
or your normal usage. You can find the events names by access the `events` key
|
||||
of the client.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -17,7 +24,9 @@ const { events } = require('@elastic/elasticsearch')
|
||||
console.log(events)
|
||||
----
|
||||
|
||||
The event emitter functionality can be useful if you want to log every request, response and error that is happening during the use of the client.
|
||||
|
||||
The event emitter functionality can be useful if you want to log every request,
|
||||
response and error that is happening during the use of the client.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -34,11 +43,12 @@ client.on('response', (err, result) => {
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
The client emits the following events:
|
||||
[cols=2*]
|
||||
|===
|
||||
|`request`
|
||||
a|Emitted before sending the actual request to Elasticsearch _(emitted multiple times in case of retries)_.
|
||||
a|Emitted before sending the actual request to {es} _(emitted multiple times in case of retries)_.
|
||||
[source,js]
|
||||
----
|
||||
client.on('request', (err, result) => {
|
||||
@ -47,7 +57,7 @@ client.on('request', (err, result) => {
|
||||
----
|
||||
|
||||
|`response`
|
||||
a|Emitted once Elasticsearch response has been received and parsed.
|
||||
a|Emitted once {es} response has been received and parsed.
|
||||
[source,js]
|
||||
----
|
||||
client.on('response', (err, result) => {
|
||||
@ -76,6 +86,7 @@ client.on('resurrect', (err, result) => {
|
||||
|===
|
||||
|
||||
The values of `result` in `request`, `response` and `sniff` will be:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
body: any;
|
||||
@ -100,7 +111,9 @@ meta: {
|
||||
};
|
||||
----
|
||||
|
||||
|
||||
While the `result` value in `resurrect` will be:
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
strategy: string;
|
||||
@ -112,8 +125,13 @@ request: {
|
||||
};
|
||||
----
|
||||
|
||||
|
||||
=== Correlation id
|
||||
Correlating events can be quite hard, especially if there are many events at the same time. The client offers you an automatic (and configurable) system to help you handle this problem.
|
||||
|
||||
Correlating events can be quite hard, especially if there are many events at the
|
||||
same time. The client offers you an automatic (and configurable) system to help
|
||||
you handle this problem.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -141,7 +159,10 @@ client.search({
|
||||
})
|
||||
----
|
||||
|
||||
By default the id is an incremental integer, but you can easily configure that with the `generateRequestId` option:
|
||||
|
||||
By default the id is an incremental integer, but you can easily configure that
|
||||
with the `generateRequestId` option:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -156,7 +177,9 @@ const client = new Client({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
You can also specify a custom id per request:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
client.search({
|
||||
@ -169,8 +192,12 @@ client.search({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== Context object
|
||||
Sometimes, you might need to make some custom data available in your events, you can do that via the `context` option of a request:
|
||||
|
||||
Sometimes, you might need to make some custom data available in your events, you
|
||||
can do that via the `context` option of a request:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -202,8 +229,14 @@ client.search({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== Client name
|
||||
If you are using multiple instances of the client or if you are using multiple child clients _(which is the recommended way to have multiple instances of the client)_, you might need to recognize which client you are using, the `name` options will help you in this regard:
|
||||
|
||||
If you are using multiple instances of the client or if you are using multiple
|
||||
child clients _(which is the recommended way to have multiple instances of the
|
||||
client)_, you might need to recognize which client you are using. The `name`
|
||||
options will help you in this regard.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
@ -248,3 +281,59 @@ child.search({
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== X-Opaque-Id support
|
||||
|
||||
To improve the overall observability, the client offers an easy way to configure
|
||||
the `X-Opaque-Id` header. If you set the `X-Opaque-Id` in a specific request,
|
||||
this will allow you to discover this identifier in the
|
||||
https://www.elastic.co/guide/en/elasticsearch/reference/master/logging.html#deprecation-logging[deprecation logs],
|
||||
help you with https://www.elastic.co/guide/en/elasticsearch/reference/master/index-modules-slowlog.html#_identifying_search_slow_log_origin[identifying search slow log origin]
|
||||
as well as https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html#_identifying_running_tasks[identifying running tasks].
|
||||
|
||||
The `X-Opaque-Id` should be configured in each request, for doing that you can
|
||||
use the `opaqueId` option, as you can see in the following example. The
|
||||
resulting header will be `{ 'X-Opaque-Id': 'my-search' }`.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200'
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, {
|
||||
opaqueId: 'my-search'
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
Sometimes it may be useful to prefix all the `X-Opaque-Id` headers with a
|
||||
specific string, in case you need to identify a specific client or server. For
|
||||
doing this, the client offers a top-level configuration option:
|
||||
`opaqueIdPrefix`. In the following example, the resulting header will be
|
||||
`{ 'X-Opaque-Id': 'proxy-client::my-search' }`.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
const { Client } = require('@elastic/elasticsearch')
|
||||
const client = new Client({
|
||||
node: 'http://localhost:9200',
|
||||
opaqueIdPrefix: 'proxy-client::'
|
||||
})
|
||||
|
||||
client.search({
|
||||
index: 'my-index',
|
||||
body: { foo: 'bar' }
|
||||
}, {
|
||||
opaqueId: 'my-search'
|
||||
}, (err, result) => {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
----
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,12 +1,19 @@
|
||||
[[typescript]]
|
||||
== TypeScript support
|
||||
|
||||
The client offers a first-class support for TypeScript, since it ships the type definitions for every exposed API.
|
||||
The client offers a first-class support for TypeScript, since it ships the type
|
||||
definitions for every exposed API.
|
||||
|
||||
NOTE: If you are using TypeScript you will be required to use _snake_case_ style to define the API parameters instead of _camelCase_.
|
||||
NOTE: If you are using TypeScript you will be required to use _snake_case_ style
|
||||
to define the API parameters instead of _camelCase_.
|
||||
|
||||
Other than the types for the surface API, the client offers the types for every request method, via the `RequestParams`, if you need the types for a search request for instance, you can access them via `RequestParams.Search`.
|
||||
Every API that supports a body, accepts a https://www.typescriptlang.org/docs/handbook/generics.html[generics] which represents the type of the request body, if you don't configure anything, it will default to `any`.
|
||||
Other than the types for the surface API, the client offers the types for every
|
||||
request method, via the `RequestParams`, if you need the types for a search
|
||||
request for instance, you can access them via `RequestParams.Search`.
|
||||
Every API that supports a body, accepts a
|
||||
https://www.typescriptlang.org/docs/handbook/generics.html[generics] which
|
||||
represents the type of the request body, if you don't configure anything, it
|
||||
will default to `any`.
|
||||
|
||||
For example:
|
||||
|
||||
@ -40,7 +47,9 @@ const searchParams: RequestParams.Search = {
|
||||
}
|
||||
----
|
||||
|
||||
You can find the type definiton of a response in `ApiResponse`, which accepts a generics as well if you want to specify the body type, otherwise it defaults to `any`.
|
||||
You can find the type definiton of a response in `ApiResponse`, which accepts a
|
||||
generics as well if you want to specify the body type, otherwise it defaults to
|
||||
`any`.
|
||||
|
||||
[source,ts]
|
||||
----
|
||||
|
||||
@ -1,7 +1,9 @@
|
||||
[[client-usage]]
|
||||
== Usage
|
||||
|
||||
Use the client is pretty straightforward, it supports all the public APIs of Elasticsearch, and every method exposes the same signature.
|
||||
Using the client is straightforward, it supports all the public APIs of {es},
|
||||
and every method exposes the same signature.
|
||||
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -36,11 +38,14 @@ The returned value of every API call is formed as follows:
|
||||
}
|
||||
----
|
||||
|
||||
NOTE: The body will be a boolean value when using `HEAD` APIs.
|
||||
NOTE: The body is a boolean value when you use `HEAD` APIs.
|
||||
|
||||
The above value will be returned even if there is an error during the execution of the request, this means that you can safely use the https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Destructuring_assignment[destructuring assignment].
|
||||
The above value is returned even if there is an error during the execution of
|
||||
the request, this means that you can safely use the
|
||||
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Destructuring_assignment[destructuring assignment].
|
||||
|
||||
The `meta` key contains all the information regarding the request, such as attempt, options, and the connection that has been used.
|
||||
The `meta` key contains all the information about the request, such as attempt,
|
||||
options, and the connection that has been used.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -59,9 +64,12 @@ client.search({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
=== Aborting a request
|
||||
|
||||
When using the callback style API, the function will also return an object that allows you to abort the API request.
|
||||
When using the callback style API, the function also returns an object that
|
||||
allows you to abort the API request.
|
||||
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -79,7 +87,8 @@ const request = client.search({
|
||||
request.abort()
|
||||
----
|
||||
|
||||
Aborting a request with the promise style API is not supported, but you can easily achieve that with convenience wrapper.
|
||||
Aborting a request with the promise style API is not supported, but you can
|
||||
achieve that with convenience wrapper.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -108,8 +117,10 @@ request.abort()
|
||||
// access the promise with `request.promise.[method]`
|
||||
----
|
||||
|
||||
|
||||
=== Request specific options
|
||||
If needed you can pass request specific options in a second object:
|
||||
|
||||
[source,js]
|
||||
----
|
||||
// promise API
|
||||
@ -133,6 +144,7 @@ client.search({
|
||||
})
|
||||
----
|
||||
|
||||
|
||||
The supported request specific options are:
|
||||
[cols=2*]
|
||||
|===
|
||||
@ -154,7 +166,7 @@ _Options:_ `false`, `'gzip'` +
|
||||
_Default:_ `false`
|
||||
|
||||
|`asStream`
|
||||
|`boolean` - Instead of getting the parsed body back, you will get the raw Node.js stream of data. +
|
||||
|`boolean` - Instead of getting the parsed body back, you get the raw Node.js stream of data. +
|
||||
_Default:_ `false`
|
||||
|
||||
|`headers`
|
||||
@ -170,13 +182,16 @@ _Default:_ `null`
|
||||
_Default:_ `null`
|
||||
|
||||
|`context`
|
||||
|`any` - Custom object per request. _(you can use it to pass some data to the clients events)_ +
|
||||
|`any` - Custom object per request. _(you can use it to pass data to the clients events)_ +
|
||||
_Default:_ `null`
|
||||
|===
|
||||
|
||||
|
||||
=== Error handling
|
||||
The client exposes a variety of error objects, that you can use to enhance your error handling. +
|
||||
You can find all the error objects inside the `errors` key in the client.
|
||||
|
||||
The client exposes a variety of error objects that you can use to enhance your
|
||||
error handling. You can find all the error objects inside the `errors` key in
|
||||
the client.
|
||||
|
||||
[source,js]
|
||||
----
|
||||
@ -184,7 +199,9 @@ const { errors } = require('@elastic/elasticsearch')
|
||||
console.log(errors)
|
||||
----
|
||||
|
||||
Following you can find the errors exported by the client.
|
||||
|
||||
You can find the errors exported by the client in the table below.
|
||||
|
||||
[cols=2*]
|
||||
|===
|
||||
|`ElasticsearchClientErrors`
|
||||
@ -194,7 +211,7 @@ Following you can find the errors exported by the client.
|
||||
|Generated when a request exceeds the `requestTimeout` option.
|
||||
|
||||
|`ConnectionError`
|
||||
|Generated when an error occurs during the reequest, it can be a connection error or a malformed stream of data.
|
||||
|Generated when an error occurs during the request, it can be a connection error or a malformed stream of data.
|
||||
|
||||
|`NoLivingConnectionsError`
|
||||
|Generated in case of all connections present in the connection pool are dead.
|
||||
|
||||
78
index.d.ts
vendored
78
index.d.ts
vendored
@ -94,14 +94,15 @@ interface ClientOptions {
|
||||
nodeFilter?: nodeFilterFn;
|
||||
nodeSelector?: nodeSelectorFn | string;
|
||||
headers?: anyObject;
|
||||
opaqueIdPrefix?: string;
|
||||
generateRequestId?: generateRequestIdFn;
|
||||
name?: string;
|
||||
auth?: BasicAuth | ApiKeyAuth;
|
||||
cloud?: {
|
||||
id: string;
|
||||
// TODO: remove username and password here in 8
|
||||
username: string;
|
||||
password: string;
|
||||
username?: string;
|
||||
password?: string;
|
||||
}
|
||||
}
|
||||
|
||||
@ -151,10 +152,14 @@ declare class Client extends EventEmitter {
|
||||
forgetFollower: ApiMethod<RequestParams.CcrForgetFollower>
|
||||
get_auto_follow_pattern: ApiMethod<RequestParams.CcrGetAutoFollowPattern>
|
||||
getAutoFollowPattern: ApiMethod<RequestParams.CcrGetAutoFollowPattern>
|
||||
pause_auto_follow_pattern: ApiMethod<RequestParams.CcrPauseAutoFollowPattern>
|
||||
pauseAutoFollowPattern: ApiMethod<RequestParams.CcrPauseAutoFollowPattern>
|
||||
pause_follow: ApiMethod<RequestParams.CcrPauseFollow>
|
||||
pauseFollow: ApiMethod<RequestParams.CcrPauseFollow>
|
||||
put_auto_follow_pattern: ApiMethod<RequestParams.CcrPutAutoFollowPattern>
|
||||
putAutoFollowPattern: ApiMethod<RequestParams.CcrPutAutoFollowPattern>
|
||||
resume_auto_follow_pattern: ApiMethod<RequestParams.CcrResumeAutoFollowPattern>
|
||||
resumeAutoFollowPattern: ApiMethod<RequestParams.CcrResumeAutoFollowPattern>
|
||||
resume_follow: ApiMethod<RequestParams.CcrResumeFollow>
|
||||
resumeFollow: ApiMethod<RequestParams.CcrResumeFollow>
|
||||
stats: ApiMethod<RequestParams.CcrStats>
|
||||
@ -180,42 +185,6 @@ declare class Client extends EventEmitter {
|
||||
}
|
||||
count: ApiMethod<RequestParams.Count>
|
||||
create: ApiMethod<RequestParams.Create>
|
||||
data_frame: {
|
||||
delete_data_frame_transform: ApiMethod<RequestParams.DataFrameDeleteDataFrameTransform>
|
||||
deleteDataFrameTransform: ApiMethod<RequestParams.DataFrameDeleteDataFrameTransform>
|
||||
get_data_frame_transform: ApiMethod<RequestParams.DataFrameGetDataFrameTransform>
|
||||
getDataFrameTransform: ApiMethod<RequestParams.DataFrameGetDataFrameTransform>
|
||||
get_data_frame_transform_stats: ApiMethod<RequestParams.DataFrameGetDataFrameTransformStats>
|
||||
getDataFrameTransformStats: ApiMethod<RequestParams.DataFrameGetDataFrameTransformStats>
|
||||
preview_data_frame_transform: ApiMethod<RequestParams.DataFramePreviewDataFrameTransform>
|
||||
previewDataFrameTransform: ApiMethod<RequestParams.DataFramePreviewDataFrameTransform>
|
||||
put_data_frame_transform: ApiMethod<RequestParams.DataFramePutDataFrameTransform>
|
||||
putDataFrameTransform: ApiMethod<RequestParams.DataFramePutDataFrameTransform>
|
||||
start_data_frame_transform: ApiMethod<RequestParams.DataFrameStartDataFrameTransform>
|
||||
startDataFrameTransform: ApiMethod<RequestParams.DataFrameStartDataFrameTransform>
|
||||
stop_data_frame_transform: ApiMethod<RequestParams.DataFrameStopDataFrameTransform>
|
||||
stopDataFrameTransform: ApiMethod<RequestParams.DataFrameStopDataFrameTransform>
|
||||
update_data_frame_transform: ApiMethod<RequestParams.DataFrameUpdateDataFrameTransform>
|
||||
updateDataFrameTransform: ApiMethod<RequestParams.DataFrameUpdateDataFrameTransform>
|
||||
}
|
||||
dataFrame: {
|
||||
delete_data_frame_transform: ApiMethod<RequestParams.DataFrameDeleteDataFrameTransform>
|
||||
deleteDataFrameTransform: ApiMethod<RequestParams.DataFrameDeleteDataFrameTransform>
|
||||
get_data_frame_transform: ApiMethod<RequestParams.DataFrameGetDataFrameTransform>
|
||||
getDataFrameTransform: ApiMethod<RequestParams.DataFrameGetDataFrameTransform>
|
||||
get_data_frame_transform_stats: ApiMethod<RequestParams.DataFrameGetDataFrameTransformStats>
|
||||
getDataFrameTransformStats: ApiMethod<RequestParams.DataFrameGetDataFrameTransformStats>
|
||||
preview_data_frame_transform: ApiMethod<RequestParams.DataFramePreviewDataFrameTransform>
|
||||
previewDataFrameTransform: ApiMethod<RequestParams.DataFramePreviewDataFrameTransform>
|
||||
put_data_frame_transform: ApiMethod<RequestParams.DataFramePutDataFrameTransform>
|
||||
putDataFrameTransform: ApiMethod<RequestParams.DataFramePutDataFrameTransform>
|
||||
start_data_frame_transform: ApiMethod<RequestParams.DataFrameStartDataFrameTransform>
|
||||
startDataFrameTransform: ApiMethod<RequestParams.DataFrameStartDataFrameTransform>
|
||||
stop_data_frame_transform: ApiMethod<RequestParams.DataFrameStopDataFrameTransform>
|
||||
stopDataFrameTransform: ApiMethod<RequestParams.DataFrameStopDataFrameTransform>
|
||||
update_data_frame_transform: ApiMethod<RequestParams.DataFrameUpdateDataFrameTransform>
|
||||
updateDataFrameTransform: ApiMethod<RequestParams.DataFrameUpdateDataFrameTransform>
|
||||
}
|
||||
delete: ApiMethod<RequestParams.Delete>
|
||||
delete_by_query: ApiMethod<RequestParams.DeleteByQuery>
|
||||
deleteByQuery: ApiMethod<RequestParams.DeleteByQuery>
|
||||
@ -223,6 +192,17 @@ declare class Client extends EventEmitter {
|
||||
deleteByQueryRethrottle: ApiMethod<RequestParams.DeleteByQueryRethrottle>
|
||||
delete_script: ApiMethod<RequestParams.DeleteScript>
|
||||
deleteScript: ApiMethod<RequestParams.DeleteScript>
|
||||
enrich: {
|
||||
delete_policy: ApiMethod<RequestParams.EnrichDeletePolicy>
|
||||
deletePolicy: ApiMethod<RequestParams.EnrichDeletePolicy>
|
||||
execute_policy: ApiMethod<RequestParams.EnrichExecutePolicy>
|
||||
executePolicy: ApiMethod<RequestParams.EnrichExecutePolicy>
|
||||
get_policy: ApiMethod<RequestParams.EnrichGetPolicy>
|
||||
getPolicy: ApiMethod<RequestParams.EnrichGetPolicy>
|
||||
put_policy: ApiMethod<RequestParams.EnrichPutPolicy>
|
||||
putPolicy: ApiMethod<RequestParams.EnrichPutPolicy>
|
||||
stats: ApiMethod<RequestParams.EnrichStats>
|
||||
}
|
||||
exists: ApiMethod<RequestParams.Exists>
|
||||
exists_source: ApiMethod<RequestParams.ExistsSource>
|
||||
existsSource: ApiMethod<RequestParams.ExistsSource>
|
||||
@ -567,8 +547,12 @@ declare class Client extends EventEmitter {
|
||||
deleteLifecycle: ApiMethod<RequestParams.SlmDeleteLifecycle>
|
||||
execute_lifecycle: ApiMethod<RequestParams.SlmExecuteLifecycle>
|
||||
executeLifecycle: ApiMethod<RequestParams.SlmExecuteLifecycle>
|
||||
execute_retention: ApiMethod<RequestParams.SlmExecuteRetention>
|
||||
executeRetention: ApiMethod<RequestParams.SlmExecuteRetention>
|
||||
get_lifecycle: ApiMethod<RequestParams.SlmGetLifecycle>
|
||||
getLifecycle: ApiMethod<RequestParams.SlmGetLifecycle>
|
||||
get_stats: ApiMethod<RequestParams.SlmGetStats>
|
||||
getStats: ApiMethod<RequestParams.SlmGetStats>
|
||||
put_lifecycle: ApiMethod<RequestParams.SlmPutLifecycle>
|
||||
putLifecycle: ApiMethod<RequestParams.SlmPutLifecycle>
|
||||
}
|
||||
@ -604,6 +588,24 @@ declare class Client extends EventEmitter {
|
||||
list: ApiMethod<RequestParams.TasksList>
|
||||
}
|
||||
termvectors: ApiMethod<RequestParams.Termvectors>
|
||||
transform: {
|
||||
delete_transform: ApiMethod<RequestParams.TransformDeleteTransform>
|
||||
deleteTransform: ApiMethod<RequestParams.TransformDeleteTransform>
|
||||
get_transform: ApiMethod<RequestParams.TransformGetTransform>
|
||||
getTransform: ApiMethod<RequestParams.TransformGetTransform>
|
||||
get_transform_stats: ApiMethod<RequestParams.TransformGetTransformStats>
|
||||
getTransformStats: ApiMethod<RequestParams.TransformGetTransformStats>
|
||||
preview_transform: ApiMethod<RequestParams.TransformPreviewTransform>
|
||||
previewTransform: ApiMethod<RequestParams.TransformPreviewTransform>
|
||||
put_transform: ApiMethod<RequestParams.TransformPutTransform>
|
||||
putTransform: ApiMethod<RequestParams.TransformPutTransform>
|
||||
start_transform: ApiMethod<RequestParams.TransformStartTransform>
|
||||
startTransform: ApiMethod<RequestParams.TransformStartTransform>
|
||||
stop_transform: ApiMethod<RequestParams.TransformStopTransform>
|
||||
stopTransform: ApiMethod<RequestParams.TransformStopTransform>
|
||||
update_transform: ApiMethod<RequestParams.TransformUpdateTransform>
|
||||
updateTransform: ApiMethod<RequestParams.TransformUpdateTransform>
|
||||
}
|
||||
update: ApiMethod<RequestParams.Update>
|
||||
update_by_query: ApiMethod<RequestParams.UpdateByQuery>
|
||||
updateByQuery: ApiMethod<RequestParams.UpdateByQuery>
|
||||
|
||||
6
index.js
6
index.js
@ -79,7 +79,8 @@ class Client extends EventEmitter {
|
||||
nodeSelector: 'round-robin',
|
||||
generateRequestId: null,
|
||||
name: 'elasticsearch-js',
|
||||
auth: null
|
||||
auth: null,
|
||||
opaqueIdPrefix: null
|
||||
}, opts)
|
||||
|
||||
this[kInitialOptions] = options
|
||||
@ -121,7 +122,8 @@ class Client extends EventEmitter {
|
||||
nodeFilter: options.nodeFilter,
|
||||
nodeSelector: options.nodeSelector,
|
||||
generateRequestId: options.generateRequestId,
|
||||
name: options.name
|
||||
name: options.name,
|
||||
opaqueIdPrefix: options.opaqueIdPrefix
|
||||
})
|
||||
|
||||
const apis = buildApi({
|
||||
|
||||
5
lib/Transport.d.ts
vendored
5
lib/Transport.d.ts
vendored
@ -38,6 +38,7 @@ interface TransportOptions {
|
||||
headers?: anyObject;
|
||||
generateRequestId?: generateRequestIdFn;
|
||||
name: string;
|
||||
opaqueIdPrefix?: string;
|
||||
}
|
||||
|
||||
export interface RequestEvent<T = any, C = any> {
|
||||
@ -80,7 +81,7 @@ export interface TransportRequestParams {
|
||||
}
|
||||
|
||||
export interface TransportRequestOptions {
|
||||
ignore?: [number];
|
||||
ignore?: number[];
|
||||
requestTimeout?: number | string;
|
||||
maxRetries?: number;
|
||||
asStream?: boolean;
|
||||
@ -90,6 +91,7 @@ export interface TransportRequestOptions {
|
||||
id?: any;
|
||||
context?: any;
|
||||
warnings?: [string];
|
||||
opaqueId?: string;
|
||||
}
|
||||
|
||||
export interface TransportRequestCallback {
|
||||
@ -121,6 +123,7 @@ export default class Transport {
|
||||
compression: 'gzip' | false;
|
||||
sniffInterval: number;
|
||||
sniffOnConnectionFault: boolean;
|
||||
opaqueIdPrefix: string | null;
|
||||
sniffEndpoint: string;
|
||||
_sniffEnabled: boolean;
|
||||
_nextSniff: number;
|
||||
|
||||
@ -41,6 +41,7 @@ class Transport {
|
||||
this.sniffEndpoint = opts.sniffEndpoint
|
||||
this.generateRequestId = opts.generateRequestId || generateRequestId()
|
||||
this.name = opts.name
|
||||
this.opaqueIdPrefix = opts.opaqueIdPrefix
|
||||
|
||||
this.nodeFilter = opts.nodeFilter || defaultNodeFilter
|
||||
if (typeof opts.nodeSelector === 'function') {
|
||||
@ -108,12 +109,18 @@ class Transport {
|
||||
if (meta.aborted === true) return
|
||||
meta.connection = this.getConnection({ requestId: meta.request.id })
|
||||
if (meta.connection === null) {
|
||||
return callback(new NoLivingConnectionsError('There are not living connections'), result)
|
||||
return callback(new NoLivingConnectionsError('There are no living connections'), result)
|
||||
}
|
||||
|
||||
// TODO: make this assignment FAST
|
||||
const headers = Object.assign({}, this.headers, options.headers)
|
||||
|
||||
if (options.opaqueId !== undefined) {
|
||||
headers['X-Opaque-Id'] = this.opaqueIdPrefix !== null
|
||||
? this.opaqueIdPrefix + options.opaqueId
|
||||
: options.opaqueId
|
||||
}
|
||||
|
||||
// handle json body
|
||||
if (params.body != null) {
|
||||
if (shouldSerialize(params.body) === true) {
|
||||
@ -123,8 +130,9 @@ class Transport {
|
||||
return callback(err, result)
|
||||
}
|
||||
}
|
||||
headers['Content-Type'] = headers['Content-Type'] || 'application/json'
|
||||
|
||||
if (params.body !== '') {
|
||||
headers['Content-Type'] = headers['Content-Type'] || 'application/json'
|
||||
if (compression === 'gzip') {
|
||||
if (isStream(params.body) === false) {
|
||||
params.body = intoStream(params.body).pipe(createGzip())
|
||||
@ -133,6 +141,7 @@ class Transport {
|
||||
}
|
||||
headers['Content-Encoding'] = compression
|
||||
}
|
||||
}
|
||||
|
||||
if (isStream(params.body) === false) {
|
||||
headers['Content-Length'] = '' + Buffer.byteLength(params.body)
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
|
||||
"version": "7.4.0",
|
||||
"version": "7.5.1",
|
||||
"keywords": [
|
||||
"elasticsearch",
|
||||
"elastic",
|
||||
@ -19,7 +19,7 @@
|
||||
"test": "npm run lint && npm run test:unit && npm run test:behavior && npm run test:types",
|
||||
"test:unit": "tap test/unit/*.test.js -t 300 --no-coverage",
|
||||
"test:behavior": "tap test/behavior/*.test.js -t 300 --no-coverage",
|
||||
"test:integration": "tap test/integration/index.js -T --no-coverage",
|
||||
"test:integration": "node test/integration/index.js",
|
||||
"test:types": "tsc --project ./test/types/tsconfig.json",
|
||||
"test:coverage": "nyc tap test/unit/*.test.js test/behavior/*.test.js -t 300 && nyc report --reporter=text-lcov > coverage.lcov && codecov",
|
||||
"lint": "standard",
|
||||
@ -44,6 +44,7 @@
|
||||
"dedent": "^0.7.0",
|
||||
"deepmerge": "^4.0.0",
|
||||
"dezalgo": "^1.0.3",
|
||||
"fast-deep-equal": "^3.1.1",
|
||||
"js-yaml": "^3.13.1",
|
||||
"license-checker": "^25.0.1",
|
||||
"lolex": "^4.0.1",
|
||||
|
||||
@ -48,6 +48,7 @@ function start (opts) {
|
||||
|
||||
const apiFolderContents = readdirSync(apiFolder)
|
||||
const xPackFolderContents = readdirSync(xPackFolder)
|
||||
.filter(file => !file.startsWith('data_frame_transform_deprecated'))
|
||||
|
||||
apiFolderContents.forEach(generateApiFile(apiFolder, log))
|
||||
xPackFolderContents.forEach(generateApiFile(xPackFolder, log))
|
||||
|
||||
@ -4,8 +4,14 @@
|
||||
|
||||
'use strict'
|
||||
|
||||
const { readdirSync } = require('fs')
|
||||
const { join } = require('path')
|
||||
const dedent = require('dedent')
|
||||
|
||||
const codeExamples = readdirSync(join(__dirname, '..', '..', 'docs', 'examples'))
|
||||
.map(file => file.slice(0, -9))
|
||||
.filter(api => api !== 'index')
|
||||
|
||||
function generateDocs (common, spec) {
|
||||
var doc = dedent`
|
||||
[[api-reference]]
|
||||
@ -67,7 +73,7 @@ function commonParameters (spec) {
|
||||
=== Common parameters
|
||||
Parameters that are accepted by all API endpoints.
|
||||
|
||||
link:{ref}/common-options.html[Reference]
|
||||
link:{ref}/common-options.html[Documentation]
|
||||
[cols=2*]
|
||||
|===\n`
|
||||
Object.keys(spec.params).forEach(key => {
|
||||
@ -170,7 +176,10 @@ function generateApiDoc (spec) {
|
||||
client.${camelify(name)}(${codeParameters.length > 0 ? `{\n ${codeParameters}\n}` : ''})
|
||||
----\n`
|
||||
if (documentationUrl) {
|
||||
doc += `link:${documentationUrl}[Reference]\n`
|
||||
doc += `link:${documentationUrl}[Documentation] +\n`
|
||||
}
|
||||
if (codeExamples.includes(name)) {
|
||||
doc += `{jsclient}/${name.replace(/\./g, '_')}_examples.html[Code Example] +\n`
|
||||
}
|
||||
|
||||
if (params.length !== 0) {
|
||||
|
||||
@ -13,6 +13,7 @@ const esDefaultRoles = [
|
||||
'code_user',
|
||||
'data_frame_transforms_admin',
|
||||
'data_frame_transforms_user',
|
||||
'enrich_user',
|
||||
'ingest_admin',
|
||||
'kibana_dashboard_only_user',
|
||||
'kibana_system',
|
||||
@ -29,6 +30,8 @@ const esDefaultRoles = [
|
||||
'rollup_user',
|
||||
'snapshot_user',
|
||||
'superuser',
|
||||
'transform_admin',
|
||||
'transform_user',
|
||||
'transport_client',
|
||||
'watcher_admin',
|
||||
'watcher_user'
|
||||
|
||||
@ -8,16 +8,20 @@ const { readFileSync, accessSync, mkdirSync, readdirSync, statSync } = require('
|
||||
const { join, sep } = require('path')
|
||||
const yaml = require('js-yaml')
|
||||
const Git = require('simple-git')
|
||||
const tap = require('tap')
|
||||
const { Client } = require('../../index')
|
||||
const TestRunner = require('./test-runner')
|
||||
const build = require('./test-runner')
|
||||
const { sleep } = require('./helper')
|
||||
const ms = require('ms')
|
||||
|
||||
const esRepo = 'https://github.com/elastic/elasticsearch.git'
|
||||
const esFolder = join(__dirname, '..', '..', 'elasticsearch')
|
||||
const yamlFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'test')
|
||||
const xPackYamlFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'test')
|
||||
|
||||
const MAX_API_TIME = 1000 * 90
|
||||
const MAX_FILE_TIME = 1000 * 30
|
||||
const MAX_TEST_TIME = 1000 * 2
|
||||
|
||||
const ossSkips = {
|
||||
'cat.indices/10_basic.yml': ['Test cat indices output for closed index (pre 7.2.0)'],
|
||||
'cluster.health/10_basic.yml': ['cluster health with closed index (pre 7.2.0)'],
|
||||
@ -68,8 +72,7 @@ const xPackBlackList = {
|
||||
'xpack/15_basic.yml': ['*']
|
||||
}
|
||||
|
||||
class Runner {
|
||||
constructor (opts = {}) {
|
||||
function runner (opts = {}) {
|
||||
const options = { node: opts.node }
|
||||
if (opts.isXPack) {
|
||||
options.ssl = {
|
||||
@ -77,41 +80,38 @@ class Runner {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
}
|
||||
this.client = new Client(options)
|
||||
console.log('Loading yaml suite')
|
||||
}
|
||||
const client = new Client(options)
|
||||
log('Loading yaml suite')
|
||||
start({ client, isXPack: opts.isXPack })
|
||||
.catch(console.log)
|
||||
}
|
||||
|
||||
async waitCluster (client, times = 0) {
|
||||
async function waitCluster (client, times = 0) {
|
||||
try {
|
||||
await client.cluster.health({ waitForStatus: 'green', timeout: '50s' })
|
||||
} catch (err) {
|
||||
if (++times < 10) {
|
||||
await sleep(5000)
|
||||
return this.waitCluster(client, times)
|
||||
return waitCluster(client, times)
|
||||
}
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async start ({ isXPack }) {
|
||||
const { client } = this
|
||||
const parse = this.parse.bind(this)
|
||||
|
||||
console.log('Waiting for Elasticsearch')
|
||||
await this.waitCluster(client)
|
||||
async function start ({ client, isXPack }) {
|
||||
log('Waiting for Elasticsearch')
|
||||
await waitCluster(client)
|
||||
|
||||
const { body } = await client.info()
|
||||
const { number: version, build_hash: sha } = body.version
|
||||
|
||||
console.log(`Checking out sha ${sha}...`)
|
||||
await this.withSHA(sha)
|
||||
log(`Checking out sha ${sha}...`)
|
||||
await withSHA(sha)
|
||||
|
||||
console.log(`Testing ${isXPack ? 'XPack' : 'oss'} api...`)
|
||||
log(`Testing ${isXPack ? 'XPack' : 'oss'} api...`)
|
||||
|
||||
const folders = []
|
||||
.concat(getAllFiles(yamlFolder))
|
||||
.concat(isXPack ? getAllFiles(xPackYamlFolder) : [])
|
||||
const folders = getAllFiles(isXPack ? xPackYamlFolder : yamlFolder)
|
||||
.filter(t => !/(README|TODO)/g.test(t))
|
||||
// we cluster the array based on the folder names,
|
||||
// to provide a better test log output
|
||||
@ -129,6 +129,7 @@ class Runner {
|
||||
return arr
|
||||
}, [])
|
||||
|
||||
const totalTime = now()
|
||||
for (const folder of folders) {
|
||||
// pretty name
|
||||
const apiName = folder[0].slice(
|
||||
@ -136,8 +137,16 @@ class Runner {
|
||||
folder[0].lastIndexOf(sep)
|
||||
)
|
||||
|
||||
tap.test(`Testing ${apiName}`, { bail: true, timeout: 0 }, t => {
|
||||
log('Testing ' + apiName.slice(1))
|
||||
const apiTime = now()
|
||||
|
||||
for (const file of folder) {
|
||||
const testRunner = build({
|
||||
client,
|
||||
version,
|
||||
isXPack: file.includes('x-pack')
|
||||
})
|
||||
const fileTime = now()
|
||||
const data = readFileSync(file, 'utf8')
|
||||
// get the test yaml (as object), some file has multiple yaml documents inside,
|
||||
// every document is separated by '---', so we split on the separator
|
||||
@ -148,17 +157,6 @@ class Runner {
|
||||
.filter(Boolean)
|
||||
.map(parse)
|
||||
|
||||
t.test(
|
||||
file.slice(file.lastIndexOf(apiName)),
|
||||
testFile(file, tests)
|
||||
)
|
||||
}
|
||||
t.end()
|
||||
})
|
||||
}
|
||||
|
||||
function testFile (file, tests) {
|
||||
return t => {
|
||||
// get setup and teardown if present
|
||||
var setupTest = null
|
||||
var teardownTest = null
|
||||
@ -167,28 +165,55 @@ class Runner {
|
||||
if (test.teardown) teardownTest = test.teardown
|
||||
}
|
||||
|
||||
tests.forEach(test => {
|
||||
const cleanPath = file.slice(file.lastIndexOf(apiName))
|
||||
log(' ' + cleanPath)
|
||||
|
||||
for (const test of tests) {
|
||||
const testTime = now()
|
||||
const name = Object.keys(test)[0]
|
||||
if (name === 'setup' || name === 'teardown') return
|
||||
if (shouldSkip(t, isXPack, file, name)) return
|
||||
|
||||
// create a subtest for the specific folder + test file + test name
|
||||
t.test(name, async t => {
|
||||
const testRunner = new TestRunner({
|
||||
client,
|
||||
version,
|
||||
tap: t,
|
||||
isXPack: file.includes('x-pack')
|
||||
})
|
||||
if (name === 'setup' || name === 'teardown') continue
|
||||
if (shouldSkip(isXPack, file, name)) continue
|
||||
log(' - ' + name)
|
||||
try {
|
||||
await testRunner.run(setupTest, test[name], teardownTest)
|
||||
})
|
||||
})
|
||||
t.end()
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
process.exit(1)
|
||||
}
|
||||
const totalTestTime = now() - testTime
|
||||
if (totalTestTime > MAX_TEST_TIME) {
|
||||
log(' took too long: ' + ms(totalTestTime))
|
||||
} else {
|
||||
log(' took: ' + ms(totalTestTime))
|
||||
}
|
||||
}
|
||||
const totalFileTime = now() - fileTime
|
||||
if (totalFileTime > MAX_FILE_TIME) {
|
||||
log(` ${cleanPath} took too long: ` + ms(totalFileTime))
|
||||
} else {
|
||||
log(` ${cleanPath} took: ` + ms(totalFileTime))
|
||||
}
|
||||
}
|
||||
const totalApiTime = now() - apiTime
|
||||
if (totalApiTime > MAX_API_TIME) {
|
||||
log(`${apiName} took too long: ` + ms(totalApiTime))
|
||||
} else {
|
||||
log(`${apiName} took: ` + ms(totalApiTime))
|
||||
}
|
||||
}
|
||||
log(`Total testing time: ${ms(now() - totalTime)}`)
|
||||
}
|
||||
|
||||
parse (data) {
|
||||
function log (text) {
|
||||
process.stdout.write(text + '\n')
|
||||
}
|
||||
|
||||
function now () {
|
||||
var ts = process.hrtime()
|
||||
return (ts[0] * 1e3) + (ts[1] / 1e6)
|
||||
}
|
||||
|
||||
function parse (data) {
|
||||
try {
|
||||
var doc = yaml.safeLoad(data)
|
||||
} catch (err) {
|
||||
@ -196,14 +221,9 @@ class Runner {
|
||||
return
|
||||
}
|
||||
return doc
|
||||
}
|
||||
}
|
||||
|
||||
getTest (folder) {
|
||||
const tests = readdirSync(folder)
|
||||
return tests.filter(t => !/(README|TODO)/g.test(t))
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Sets the elasticsearch repository to the given sha.
|
||||
* If the repository is not present in `esFolder` it will
|
||||
* clone the repository and the checkout the sha.
|
||||
@ -212,17 +232,17 @@ class Runner {
|
||||
* @param {string} sha
|
||||
* @param {function} callback
|
||||
*/
|
||||
withSHA (sha) {
|
||||
function withSHA (sha) {
|
||||
return new Promise((resolve, reject) => {
|
||||
_withSHA.call(this, err => err ? reject(err) : resolve())
|
||||
_withSHA(err => err ? reject(err) : resolve())
|
||||
})
|
||||
|
||||
function _withSHA (callback) {
|
||||
var fresh = false
|
||||
var retry = 0
|
||||
|
||||
if (!this.pathExist(esFolder)) {
|
||||
if (!this.createFolder(esFolder)) {
|
||||
if (!pathExist(esFolder)) {
|
||||
if (!createFolder(esFolder)) {
|
||||
return callback(new Error('Failed folder creation'))
|
||||
}
|
||||
fresh = true
|
||||
@ -237,7 +257,7 @@ class Runner {
|
||||
}
|
||||
|
||||
function checkout () {
|
||||
console.log(`Checking out sha '${sha}'`)
|
||||
log(`Checking out sha '${sha}'`)
|
||||
git.checkout(sha, err => {
|
||||
if (err) {
|
||||
if (retry++ > 0) {
|
||||
@ -250,7 +270,7 @@ class Runner {
|
||||
}
|
||||
|
||||
function pull (cb) {
|
||||
console.log('Pulling elasticsearch repository...')
|
||||
log('Pulling elasticsearch repository...')
|
||||
git.pull(err => {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
@ -260,7 +280,7 @@ class Runner {
|
||||
}
|
||||
|
||||
function clone (cb) {
|
||||
console.log('Cloning elasticsearch repository...')
|
||||
log('Cloning elasticsearch repository...')
|
||||
git.clone(esRepo, esFolder, err => {
|
||||
if (err) {
|
||||
return callback(err)
|
||||
@ -269,35 +289,34 @@ class Runner {
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Checks if the given path exists
|
||||
* @param {string} path
|
||||
* @returns {boolean} true if exists, false if not
|
||||
*/
|
||||
pathExist (path) {
|
||||
function pathExist (path) {
|
||||
try {
|
||||
accessSync(path)
|
||||
return true
|
||||
} catch (err) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Creates the given folder
|
||||
* @param {string} name
|
||||
* @returns {boolean} true on success, false on failure
|
||||
*/
|
||||
createFolder (name) {
|
||||
function createFolder (name) {
|
||||
try {
|
||||
mkdirSync(name)
|
||||
return true
|
||||
} catch (err) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
@ -306,18 +325,17 @@ if (require.main === module) {
|
||||
node,
|
||||
isXPack: node.indexOf('@') > -1
|
||||
}
|
||||
const runner = new Runner(opts)
|
||||
runner.start(opts).catch(console.log)
|
||||
runner(opts)
|
||||
}
|
||||
|
||||
const shouldSkip = (t, isXPack, file, name) => {
|
||||
const shouldSkip = (isXPack, file, name) => {
|
||||
var list = Object.keys(ossSkips)
|
||||
for (var i = 0; i < list.length; i++) {
|
||||
const ossTest = ossSkips[list[i]]
|
||||
for (var j = 0; j < ossTest.length; j++) {
|
||||
if (file.endsWith(list[i]) && (name === ossTest[j] || ossTest[j] === '*')) {
|
||||
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
|
||||
t.comment(`Skipping test ${testName} because is blacklisted in the oss test`)
|
||||
log(`Skipping test ${testName} because is blacklisted in the oss test`)
|
||||
return true
|
||||
}
|
||||
}
|
||||
@ -330,7 +348,7 @@ const shouldSkip = (t, isXPack, file, name) => {
|
||||
for (j = 0; j < platTest.length; j++) {
|
||||
if (file.endsWith(list[i]) && (name === platTest[j] || platTest[j] === '*')) {
|
||||
const testName = file.slice(file.indexOf(`${sep}elasticsearch${sep}`)) + ' / ' + name
|
||||
t.comment(`Skipping test ${testName} because is blacklisted in the XPack test`)
|
||||
log(`Skipping test ${testName} because is blacklisted in the XPack test`)
|
||||
return true
|
||||
}
|
||||
}
|
||||
@ -347,4 +365,4 @@ const getAllFiles = dir =>
|
||||
return isDirectory ? [...files, ...getAllFiles(name)] : [...files, name]
|
||||
}, [])
|
||||
|
||||
module.exports = Runner
|
||||
module.exports = runner
|
||||
|
||||
@ -6,9 +6,10 @@
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
|
||||
const t = require('tap')
|
||||
const assert = require('assert')
|
||||
const semver = require('semver')
|
||||
const helper = require('./helper')
|
||||
const deepEqual = require('fast-deep-equal')
|
||||
const { ConfigurationError } = require('../../lib/errors')
|
||||
|
||||
const { delve, to } = helper
|
||||
@ -25,63 +26,58 @@ const supportedFeatures = [
|
||||
'arbitrary_key'
|
||||
]
|
||||
|
||||
class TestRunner {
|
||||
constructor (opts = {}) {
|
||||
opts = opts || {}
|
||||
|
||||
this.client = opts.client
|
||||
this.esVersion = opts.version
|
||||
this.response = null
|
||||
this.stash = new Map()
|
||||
this.tap = opts.tap || t
|
||||
this.isXPack = opts.isXPack
|
||||
}
|
||||
function build (opts = {}) {
|
||||
const client = opts.client
|
||||
const esVersion = opts.version
|
||||
const isXPack = opts.isXPack
|
||||
const stash = new Map()
|
||||
let response = null
|
||||
|
||||
/**
|
||||
* Runs a cleanup, removes all indices, aliases, templates, and snapshots
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async cleanup () {
|
||||
this.tap.comment('Cleanup')
|
||||
async function cleanup () {
|
||||
// // tap.comment('Cleanup')
|
||||
|
||||
this.response = null
|
||||
this.stash = new Map()
|
||||
response = null
|
||||
stash.clear()
|
||||
|
||||
try {
|
||||
await this.client.indices.delete({ index: '_all' }, { ignore: 404 })
|
||||
await client.indices.delete({ index: '_all' }, { ignore: 404 })
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: indices.delete')
|
||||
assert.ifError(err, 'should not error: indices.delete')
|
||||
}
|
||||
|
||||
try {
|
||||
await this.client.indices.deleteAlias({ index: '_all', name: '_all' }, { ignore: 404 })
|
||||
await client.indices.deleteAlias({ index: '_all', name: '_all' }, { ignore: 404 })
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: indices.deleteAlias')
|
||||
assert.ifError(err, 'should not error: indices.deleteAlias')
|
||||
}
|
||||
|
||||
try {
|
||||
const { body: templates } = await this.client.indices.getTemplate()
|
||||
const { body: templates } = await client.indices.getTemplate()
|
||||
await helper.runInParallel(
|
||||
this.client, 'indices.deleteTemplate',
|
||||
client, 'indices.deleteTemplate',
|
||||
Object.keys(templates).map(t => ({ name: t }))
|
||||
)
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: indices.deleteTemplate')
|
||||
assert.ifError(err, 'should not error: indices.deleteTemplate')
|
||||
}
|
||||
|
||||
try {
|
||||
const { body: repositories } = await this.client.snapshot.getRepository()
|
||||
const { body: repositories } = await client.snapshot.getRepository()
|
||||
for (const repository of Object.keys(repositories)) {
|
||||
const { body: snapshots } = await this.client.snapshot.get({ repository, snapshot: '_all' })
|
||||
const { body: snapshots } = await client.snapshot.get({ repository, snapshot: '_all' })
|
||||
await helper.runInParallel(
|
||||
this.client, 'snapshot.delete',
|
||||
client, 'snapshot.delete',
|
||||
Object.keys(snapshots).map(snapshot => ({ snapshot, repository })),
|
||||
{ ignore: [404] }
|
||||
)
|
||||
await this.client.snapshot.deleteRepository({ repository }, { ignore: [404] })
|
||||
await client.snapshot.deleteRepository({ repository }, { ignore: [404] })
|
||||
}
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: snapshot.delete / snapshot.deleteRepository')
|
||||
assert.ifError(err, 'should not error: snapshot.delete / snapshot.deleteRepository')
|
||||
}
|
||||
}
|
||||
|
||||
@ -90,33 +86,33 @@ class TestRunner {
|
||||
* This set of calls should be executed before the final clenup.
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async cleanupXPack () {
|
||||
this.tap.comment('XPack Cleanup')
|
||||
async function cleanupXPack () {
|
||||
// tap.comment('XPack Cleanup')
|
||||
|
||||
try {
|
||||
const { body } = await this.client.security.getRole()
|
||||
const { body } = await client.security.getRole()
|
||||
const roles = Object.keys(body).filter(n => helper.esDefaultRoles.indexOf(n) === -1)
|
||||
await helper.runInParallel(
|
||||
this.client, 'security.deleteRole',
|
||||
client, 'security.deleteRole',
|
||||
roles.map(r => ({ name: r }))
|
||||
)
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: security role cleanup')
|
||||
assert.ifError(err, 'should not error: security role cleanup')
|
||||
}
|
||||
|
||||
try {
|
||||
const { body } = await this.client.security.getUser()
|
||||
const { body } = await client.security.getUser()
|
||||
const users = Object.keys(body).filter(n => helper.esDefaultUsers.indexOf(n) === -1)
|
||||
await helper.runInParallel(
|
||||
this.client, 'security.deleteUser',
|
||||
client, 'security.deleteUser',
|
||||
users.map(r => ({ username: r }))
|
||||
)
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: security user cleanup')
|
||||
assert.ifError(err, 'should not error: security user cleanup')
|
||||
}
|
||||
|
||||
try {
|
||||
const { body } = await this.client.security.getPrivileges()
|
||||
const { body } = await client.security.getPrivileges()
|
||||
const privileges = []
|
||||
Object.keys(body).forEach(app => {
|
||||
Object.keys(body[app]).forEach(priv => {
|
||||
@ -126,52 +122,52 @@ class TestRunner {
|
||||
})
|
||||
})
|
||||
})
|
||||
await helper.runInParallel(this.client, 'security.deletePrivileges', privileges)
|
||||
await helper.runInParallel(client, 'security.deletePrivileges', privileges)
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: security privileges cleanup')
|
||||
assert.ifError(err, 'should not error: security privileges cleanup')
|
||||
}
|
||||
|
||||
try {
|
||||
await this.client.ml.stopDatafeed({ datafeedId: '*', force: true })
|
||||
const { body } = await this.client.ml.getDatafeeds({ datafeedId: '*' })
|
||||
await client.ml.stopDatafeed({ datafeedId: '*', force: true })
|
||||
const { body } = await client.ml.getDatafeeds({ datafeedId: '*' })
|
||||
const feeds = body.datafeeds.map(f => f.datafeed_id)
|
||||
await helper.runInParallel(
|
||||
this.client, 'ml.deleteDatafeed',
|
||||
client, 'ml.deleteDatafeed',
|
||||
feeds.map(f => ({ datafeedId: f }))
|
||||
)
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should error: not ml datafeed cleanup')
|
||||
assert.ifError(err, 'should error: not ml datafeed cleanup')
|
||||
}
|
||||
|
||||
try {
|
||||
await this.client.ml.closeJob({ jobId: '*', force: true })
|
||||
const { body } = await this.client.ml.getJobs({ jobId: '*' })
|
||||
await client.ml.closeJob({ jobId: '*', force: true })
|
||||
const { body } = await client.ml.getJobs({ jobId: '*' })
|
||||
const jobs = body.jobs.map(j => j.job_id)
|
||||
await helper.runInParallel(
|
||||
this.client, 'ml.deleteJob',
|
||||
client, 'ml.deleteJob',
|
||||
jobs.map(j => ({ jobId: j, waitForCompletion: true, force: true }))
|
||||
)
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: ml job cleanup')
|
||||
assert.ifError(err, 'should not error: ml job cleanup')
|
||||
}
|
||||
|
||||
try {
|
||||
const { body } = await this.client.rollup.getJobs({ id: '_all' })
|
||||
const { body } = await client.rollup.getJobs({ id: '_all' })
|
||||
const jobs = body.jobs.map(j => j.config.id)
|
||||
await helper.runInParallel(
|
||||
this.client, 'rollup.stopJob',
|
||||
client, 'rollup.stopJob',
|
||||
jobs.map(j => ({ id: j, waitForCompletion: true }))
|
||||
)
|
||||
await helper.runInParallel(
|
||||
this.client, 'rollup.deleteJob',
|
||||
client, 'rollup.deleteJob',
|
||||
jobs.map(j => ({ id: j }))
|
||||
)
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: rollup jobs cleanup')
|
||||
assert.ifError(err, 'should not error: rollup jobs cleanup')
|
||||
}
|
||||
|
||||
try {
|
||||
const { body } = await this.client.tasks.list()
|
||||
const { body } = await client.tasks.list()
|
||||
const tasks = Object.keys(body.nodes)
|
||||
.reduce((acc, node) => {
|
||||
const { tasks } = body.nodes[node]
|
||||
@ -182,24 +178,24 @@ class TestRunner {
|
||||
}, [])
|
||||
|
||||
await helper.runInParallel(
|
||||
this.client, 'tasks.cancel',
|
||||
client, 'tasks.cancel',
|
||||
tasks.map(id => ({ taskId: id }))
|
||||
)
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: tasks cleanup')
|
||||
assert.ifError(err, 'should not error: tasks cleanup')
|
||||
}
|
||||
|
||||
try {
|
||||
await this.client.ilm.removePolicy({ index: '_all' })
|
||||
await client.ilm.removePolicy({ index: '_all' })
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: ilm.removePolicy')
|
||||
assert.ifError(err, 'should not error: ilm.removePolicy')
|
||||
}
|
||||
|
||||
// refresh the all indexes
|
||||
try {
|
||||
await this.client.indices.refresh({ index: '_all' })
|
||||
await client.indices.refresh({ index: '_all' })
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: indices.refresh')
|
||||
assert.ifError(err, 'should not error: indices.refresh')
|
||||
}
|
||||
}
|
||||
|
||||
@ -218,117 +214,37 @@ class TestRunner {
|
||||
* @oaram {object} teardown (null if not needed)
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async run (setup, test, teardown) {
|
||||
async function run (setup, test, teardown) {
|
||||
// if we should skip a feature in the setup/teardown section
|
||||
// we should skip the entire test file
|
||||
const skip = getSkip(setup) || getSkip(teardown)
|
||||
if (skip && this.shouldSkip(skip)) {
|
||||
this.skip(skip)
|
||||
if (skip && shouldSkip(esVersion, skip)) {
|
||||
logSkip(skip)
|
||||
return
|
||||
}
|
||||
|
||||
if (this.isXPack) {
|
||||
if (isXPack) {
|
||||
// Some xpack test requires this user
|
||||
this.tap.comment('Creating x-pack user')
|
||||
// tap.comment('Creating x-pack user')
|
||||
try {
|
||||
await this.client.security.putUser({
|
||||
await client.security.putUser({
|
||||
username: 'x_pack_rest_user',
|
||||
body: { password: 'x-pack-test-password', roles: ['superuser'] }
|
||||
})
|
||||
} catch (err) {
|
||||
this.tap.error(err, 'should not error: security.putUser')
|
||||
assert.ifError(err, 'should not error: security.putUser')
|
||||
}
|
||||
}
|
||||
|
||||
if (setup) await this.exec('Setup', setup)
|
||||
if (setup) await exec('Setup', setup)
|
||||
|
||||
await this.exec('Test', test)
|
||||
await exec('Test', test)
|
||||
|
||||
if (teardown) await this.exec('Teardown', teardown)
|
||||
if (teardown) await exec('Teardown', teardown)
|
||||
|
||||
if (this.isXPack) await this.cleanupXPack()
|
||||
if (isXPack) await cleanupXPack()
|
||||
|
||||
await this.cleanup()
|
||||
}
|
||||
|
||||
/**
|
||||
* Logs a skip
|
||||
* @param {object} the actions
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
skip (action) {
|
||||
if (action.reason && action.version) {
|
||||
this.tap.comment(`Skip: ${action.reason} (${action.version})`)
|
||||
} else if (action.features) {
|
||||
this.tap.comment(`Skip: ${JSON.stringify(action.features)})`)
|
||||
} else {
|
||||
this.tap.comment('Skipped')
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
* Decides if a test should be skipped
|
||||
* @param {object} the actions
|
||||
* @returns {boolean}
|
||||
*/
|
||||
shouldSkip (action) {
|
||||
var shouldSkip = false
|
||||
// skip based on the version
|
||||
if (action.version) {
|
||||
if (action.version.trim() === 'all') return true
|
||||
const [min, max] = action.version.split('-').map(v => v.trim())
|
||||
// if both `min` and `max` are specified
|
||||
if (min && max) {
|
||||
shouldSkip = semver.satisfies(this.esVersion, action.version)
|
||||
// if only `min` is specified
|
||||
} else if (min) {
|
||||
shouldSkip = semver.gte(this.esVersion, min)
|
||||
// if only `max` is specified
|
||||
} else if (max) {
|
||||
shouldSkip = semver.lte(this.esVersion, max)
|
||||
// something went wrong!
|
||||
} else {
|
||||
throw new Error(`skip: Bad version range: ${action.version}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldSkip) return true
|
||||
|
||||
if (action.features) {
|
||||
if (!Array.isArray(action.features)) action.features = [action.features]
|
||||
// returns true if one of the features is not present in the supportedFeatures
|
||||
shouldSkip = !!action.features.filter(f => !~supportedFeatures.indexOf(f)).length
|
||||
}
|
||||
|
||||
if (shouldSkip) return true
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the array syntax of keys and values
|
||||
* eg: 'hits.hits.1.stuff' to 'hits.hits[1].stuff'
|
||||
* @param {object} the action to update
|
||||
* @returns {obj} the updated action
|
||||
*/
|
||||
updateArraySyntax (obj) {
|
||||
const newObj = {}
|
||||
|
||||
for (const key in obj) {
|
||||
const newKey = key.replace(/\.\d{1,}\./g, v => `[${v.slice(1, -1)}].`)
|
||||
const val = obj[key]
|
||||
|
||||
if (typeof val === 'string') {
|
||||
newObj[newKey] = val.replace(/\.\d{1,}\./g, v => `[${v.slice(1, -1)}].`)
|
||||
} else if (val !== null && typeof val === 'object') {
|
||||
newObj[newKey] = this.updateArraySyntax(val)
|
||||
} else {
|
||||
newObj[newKey] = val
|
||||
}
|
||||
}
|
||||
|
||||
return newObj
|
||||
await cleanup()
|
||||
}
|
||||
|
||||
/**
|
||||
@ -340,9 +256,9 @@ class TestRunner {
|
||||
* @param {object|string} the action to update
|
||||
* @returns {object|string} the updated action
|
||||
*/
|
||||
fillStashedValues (obj) {
|
||||
function fillStashedValues (obj) {
|
||||
if (typeof obj === 'string') {
|
||||
return getStashedValues.call(this, obj)
|
||||
return getStashedValues(obj)
|
||||
}
|
||||
// iterate every key of the object
|
||||
for (const key in obj) {
|
||||
@ -355,7 +271,7 @@ class TestRunner {
|
||||
const start = val.indexOf('${')
|
||||
const end = val.indexOf('}', val.indexOf('${'))
|
||||
const stashedKey = val.slice(start + 2, end)
|
||||
const stashed = this.stash.get(stashedKey)
|
||||
const stashed = stash.get(stashedKey)
|
||||
obj[key] = val.slice(0, start) + stashed + val.slice(end + 1)
|
||||
continue
|
||||
}
|
||||
@ -364,7 +280,7 @@ class TestRunner {
|
||||
const start = val.indexOf('"$')
|
||||
const end = val.indexOf('"', start + 1)
|
||||
const stashedKey = val.slice(start + 2, end)
|
||||
const stashed = '"' + this.stash.get(stashedKey) + '"'
|
||||
const stashed = '"' + stash.get(stashedKey) + '"'
|
||||
obj[key] = val.slice(0, start) + stashed + val.slice(end + 1)
|
||||
continue
|
||||
}
|
||||
@ -372,13 +288,13 @@ class TestRunner {
|
||||
// we run the "update value" code
|
||||
if (typeof val === 'string' && val.includes('$')) {
|
||||
// update the key value
|
||||
obj[key] = getStashedValues.call(this, val)
|
||||
obj[key] = getStashedValues(val)
|
||||
continue
|
||||
}
|
||||
|
||||
// go deep in the object
|
||||
if (val !== null && typeof val === 'object') {
|
||||
this.fillStashedValues(val)
|
||||
fillStashedValues(val)
|
||||
}
|
||||
}
|
||||
|
||||
@ -392,7 +308,7 @@ class TestRunner {
|
||||
// we update every field that start with '$'
|
||||
.map(part => {
|
||||
if (part[0] === '$') {
|
||||
const stashed = this.stash.get(part.slice(1))
|
||||
const stashed = stash.get(part.slice(1))
|
||||
if (stashed == null) {
|
||||
throw new Error(`Cannot find stashed value '${part}' for '${JSON.stringify(obj)}'`)
|
||||
}
|
||||
@ -414,22 +330,21 @@ class TestRunner {
|
||||
* @param {string} the name to identify the stashed value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
set (key, name) {
|
||||
function set (key, name) {
|
||||
if (key.includes('_arbitrary_key_')) {
|
||||
var currentVisit = null
|
||||
for (const path of key.split('.')) {
|
||||
if (path === '_arbitrary_key_') {
|
||||
const keys = Object.keys(currentVisit)
|
||||
const arbitraryKey = keys[getRandomInt(0, keys.length)]
|
||||
this.stash.set(name, arbitraryKey)
|
||||
stash.set(name, arbitraryKey)
|
||||
} else {
|
||||
currentVisit = delve(this.response, path)
|
||||
currentVisit = delve(response, path)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
this.stash.set(name, delve(this.response, key))
|
||||
stash.set(name, delve(response, key))
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
@ -438,18 +353,17 @@ class TestRunner {
|
||||
* @param {string} the transformation function as string
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
transform_and_set (name, transform) {
|
||||
function transform_and_set (name, transform) {
|
||||
if (/base64EncodeCredentials/.test(transform)) {
|
||||
const [user, password] = transform
|
||||
.slice(transform.indexOf('(') + 1, -1)
|
||||
.replace(/ /g, '')
|
||||
.split(',')
|
||||
const userAndPassword = `${delve(this.response, user)}:${delve(this.response, password)}`
|
||||
this.stash.set(name, Buffer.from(userAndPassword).toString('base64'))
|
||||
const userAndPassword = `${delve(response, user)}:${delve(response, password)}`
|
||||
stash.set(name, Buffer.from(userAndPassword).toString('base64'))
|
||||
} else {
|
||||
throw new Error(`Unknown transform: '${transform}'`)
|
||||
}
|
||||
return this
|
||||
}
|
||||
|
||||
/**
|
||||
@ -457,9 +371,9 @@ class TestRunner {
|
||||
* @param {object} the action to perform
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async do (action) {
|
||||
const cmd = this.parseDo(action)
|
||||
const api = delve(this.client, cmd.method).bind(this.client)
|
||||
async function doAction (action) {
|
||||
const cmd = parseDo(action)
|
||||
const api = delve(client, cmd.method).bind(client)
|
||||
|
||||
const options = { ignore: cmd.params.ignore, headers: action.headers }
|
||||
if (cmd.params.ignore) delete cmd.params.ignore
|
||||
@ -469,7 +383,7 @@ class TestRunner {
|
||||
var body = result ? result.body : null
|
||||
|
||||
if (action.warnings && warnings === null) {
|
||||
this.tap.fail('We should get a warning header', action.warnings)
|
||||
assert.fail('We should get a warning header', action.warnings)
|
||||
} else if (!action.warnings && warnings !== null) {
|
||||
// if there is only the 'default shard will change'
|
||||
// warning we skip the check, because the yaml
|
||||
@ -482,7 +396,7 @@ class TestRunner {
|
||||
})
|
||||
|
||||
if (hasDefaultShardsWarning === true && warnings.length > 1) {
|
||||
this.tap.fail('We are not expecting warnings', warnings)
|
||||
assert.fail('We are not expecting warnings', warnings)
|
||||
}
|
||||
} else if (action.warnings && warnings !== null) {
|
||||
// if the yaml warnings do not contain the
|
||||
@ -500,22 +414,22 @@ class TestRunner {
|
||||
warnings = warnings.filter(h => !h.test(/default\snumber\sof\sshards/g))
|
||||
}
|
||||
|
||||
this.tap.deepEqual(warnings, action.warnings)
|
||||
assert.ok(deepEqual(warnings, action.warnings))
|
||||
}
|
||||
|
||||
if (action.catch) {
|
||||
this.tap.true(
|
||||
assert.ok(
|
||||
parseDoError(err, action.catch),
|
||||
`the error should be: ${action.catch}`
|
||||
)
|
||||
try {
|
||||
this.response = JSON.parse(err.body)
|
||||
response = JSON.parse(err.body)
|
||||
} catch (e) {
|
||||
this.response = err.body
|
||||
response = err.body
|
||||
}
|
||||
} else {
|
||||
this.tap.error(err, `should not error: ${cmd.method}`, action)
|
||||
this.response = body
|
||||
assert.ifError(err, `should not error: ${cmd.method}`, action)
|
||||
response = body
|
||||
}
|
||||
}
|
||||
|
||||
@ -525,138 +439,139 @@ class TestRunner {
|
||||
* @param {object} the actions to perform
|
||||
* @returns {Promise}
|
||||
*/
|
||||
async exec (name, actions) {
|
||||
this.tap.comment(name)
|
||||
async function exec (name, actions) {
|
||||
// tap.comment(name)
|
||||
for (const action of actions) {
|
||||
if (action.skip) {
|
||||
if (this.shouldSkip(action.skip)) {
|
||||
this.skip(this.fillStashedValues(action.skip))
|
||||
if (shouldSkip(esVersion, action.skip)) {
|
||||
logSkip(fillStashedValues(action.skip))
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (action.do) {
|
||||
await this.do(this.fillStashedValues(action.do))
|
||||
await doAction(fillStashedValues(action.do))
|
||||
}
|
||||
|
||||
if (action.set) {
|
||||
const key = Object.keys(action.set)[0]
|
||||
this.set(this.fillStashedValues(key), action.set[key])
|
||||
set(fillStashedValues(key), action.set[key])
|
||||
}
|
||||
|
||||
if (action.transform_and_set) {
|
||||
const key = Object.keys(action.transform_and_set)[0]
|
||||
this.transform_and_set(key, action.transform_and_set[key])
|
||||
transform_and_set(key, action.transform_and_set[key])
|
||||
}
|
||||
|
||||
if (action.match) {
|
||||
const key = Object.keys(action.match)[0]
|
||||
this.match(
|
||||
match(
|
||||
// in some cases, the yaml refers to the body with an empty string
|
||||
key === '$body' || key === ''
|
||||
? this.response
|
||||
: delve(this.response, this.fillStashedValues(key)),
|
||||
? response
|
||||
: delve(response, fillStashedValues(key)),
|
||||
key === '$body'
|
||||
? action.match[key]
|
||||
: this.fillStashedValues(action.match)[key],
|
||||
: fillStashedValues(action.match)[key],
|
||||
action.match
|
||||
)
|
||||
}
|
||||
|
||||
if (action.lt) {
|
||||
const key = Object.keys(action.lt)[0]
|
||||
this.lt(
|
||||
delve(this.response, this.fillStashedValues(key)),
|
||||
this.fillStashedValues(action.lt)[key]
|
||||
lt(
|
||||
delve(response, fillStashedValues(key)),
|
||||
fillStashedValues(action.lt)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.gt) {
|
||||
const key = Object.keys(action.gt)[0]
|
||||
this.gt(
|
||||
delve(this.response, this.fillStashedValues(key)),
|
||||
this.fillStashedValues(action.gt)[key]
|
||||
gt(
|
||||
delve(response, fillStashedValues(key)),
|
||||
fillStashedValues(action.gt)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.lte) {
|
||||
const key = Object.keys(action.lte)[0]
|
||||
this.lte(
|
||||
delve(this.response, this.fillStashedValues(key)),
|
||||
this.fillStashedValues(action.lte)[key]
|
||||
lte(
|
||||
delve(response, fillStashedValues(key)),
|
||||
fillStashedValues(action.lte)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.gte) {
|
||||
const key = Object.keys(action.gte)[0]
|
||||
this.gte(
|
||||
delve(this.response, this.fillStashedValues(key)),
|
||||
this.fillStashedValues(action.gte)[key]
|
||||
gte(
|
||||
delve(response, fillStashedValues(key)),
|
||||
fillStashedValues(action.gte)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.length) {
|
||||
const key = Object.keys(action.length)[0]
|
||||
this.length(
|
||||
length(
|
||||
key === '$body' || key === ''
|
||||
? this.response
|
||||
: delve(this.response, this.fillStashedValues(key)),
|
||||
? response
|
||||
: delve(response, fillStashedValues(key)),
|
||||
key === '$body'
|
||||
? action.length[key]
|
||||
: this.fillStashedValues(action.length)[key]
|
||||
: fillStashedValues(action.length)[key]
|
||||
)
|
||||
}
|
||||
|
||||
if (action.is_true) {
|
||||
const isTrue = this.fillStashedValues(action.is_true)
|
||||
this.is_true(
|
||||
delve(this.response, isTrue),
|
||||
const isTrue = fillStashedValues(action.is_true)
|
||||
is_true(
|
||||
delve(response, isTrue),
|
||||
isTrue
|
||||
)
|
||||
}
|
||||
|
||||
if (action.is_false) {
|
||||
const isFalse = this.fillStashedValues(action.is_false)
|
||||
this.is_false(
|
||||
delve(this.response, isFalse),
|
||||
const isFalse = fillStashedValues(action.is_false)
|
||||
is_false(
|
||||
delve(response, isFalse),
|
||||
isFalse
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
return { run }
|
||||
}
|
||||
|
||||
/**
|
||||
* Asserts that the given value is truthy
|
||||
* @param {any} the value to check
|
||||
* @param {string} an optional message
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
is_true (val, msg) {
|
||||
this.tap.true(val, `expect truthy value: ${msg} - value: ${JSON.stringify(val)}`)
|
||||
return this
|
||||
}
|
||||
function is_true (val, msg) {
|
||||
assert.ok(val, `expect truthy value: ${msg} - value: ${JSON.stringify(val)}`)
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Asserts that the given value is falsey
|
||||
* @param {any} the value to check
|
||||
* @param {string} an optional message
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
is_false (val, msg) {
|
||||
this.tap.false(val, `expect falsey value: ${msg} - value: ${JSON.stringify(val)}`)
|
||||
return this
|
||||
}
|
||||
function is_false (val, msg) {
|
||||
assert.ok(!val, `expect falsey value: ${msg} - value: ${JSON.stringify(val)}`)
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Asserts that two values are the same
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
match (val1, val2, action) {
|
||||
function match (val1, val2, action) {
|
||||
// both values are objects
|
||||
if (typeof val1 === 'object' && typeof val2 === 'object') {
|
||||
this.tap.strictDeepEqual(val1, val2, action)
|
||||
assert.ok(deepEqual(val1, val2), action)
|
||||
// the first value is the body as string and the second a pattern string
|
||||
} else if (
|
||||
typeof val1 === 'string' && typeof val2 === 'string' &&
|
||||
@ -672,84 +587,79 @@ class TestRunner {
|
||||
.replace(/\s/g, '')
|
||||
.slice(1, -1)
|
||||
// 'm' adds the support for multiline regex
|
||||
this.tap.match(val1, new RegExp(regStr, 'm'), `should match pattern provided: ${val2}, action: ${JSON.stringify(action)}`)
|
||||
assert.ok(new RegExp(regStr, 'm').test(val1), `should match pattern provided: ${val2}, action: ${JSON.stringify(action)}`)
|
||||
// tap.match(val1, new RegExp(regStr, 'm'), `should match pattern provided: ${val2}, action: ${JSON.stringify(action)}`)
|
||||
// everything else
|
||||
} else {
|
||||
this.tap.strictEqual(val1, val2, `should be equal: ${val1} - ${val2}, action: ${JSON.stringify(action)}`)
|
||||
}
|
||||
return this
|
||||
assert.strictEqual(val1, val2, `should be equal: ${val1} - ${val2}, action: ${JSON.stringify(action)}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Asserts that the first value is less than the second
|
||||
* It also verifies that the two values are numbers
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
lt (val1, val2) {
|
||||
function lt (val1, val2) {
|
||||
;[val1, val2] = getNumbers(val1, val2)
|
||||
this.tap.true(val1 < val2)
|
||||
return this
|
||||
}
|
||||
assert.ok(val1 < val2)
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Asserts that the first value is greater than the second
|
||||
* It also verifies that the two values are numbers
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
gt (val1, val2) {
|
||||
function gt (val1, val2) {
|
||||
;[val1, val2] = getNumbers(val1, val2)
|
||||
this.tap.true(val1 > val2)
|
||||
return this
|
||||
}
|
||||
assert.ok(val1 > val2)
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Asserts that the first value is less than or equal the second
|
||||
* It also verifies that the two values are numbers
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
lte (val1, val2) {
|
||||
function lte (val1, val2) {
|
||||
;[val1, val2] = getNumbers(val1, val2)
|
||||
this.tap.true(val1 <= val2)
|
||||
return this
|
||||
}
|
||||
assert.ok(val1 <= val2)
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Asserts that the first value is greater than or equal the second
|
||||
* It also verifies that the two values are numbers
|
||||
* @param {any} the first value
|
||||
* @param {any} the second value
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
gte (val1, val2) {
|
||||
*/
|
||||
function gte (val1, val2) {
|
||||
;[val1, val2] = getNumbers(val1, val2)
|
||||
this.tap.true(val1 >= val2)
|
||||
return this
|
||||
}
|
||||
assert.ok(val1 >= val2)
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Asserts that the given value has the specified length
|
||||
* @param {string|object|array} the object to check
|
||||
* @param {number} the expected length
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
length (val, len) {
|
||||
function length (val, len) {
|
||||
if (typeof val === 'string' || Array.isArray(val)) {
|
||||
this.tap.strictEqual(val.length, len)
|
||||
assert.strictEqual(val.length, len)
|
||||
} else if (typeof val === 'object' && val !== null) {
|
||||
this.tap.strictEqual(Object.keys(val).length, len)
|
||||
assert.strictEqual(Object.keys(val).length, len)
|
||||
} else {
|
||||
this.tap.fail(`length: the given value is invalid: ${val}`)
|
||||
}
|
||||
return this
|
||||
assert.fail(`length: the given value is invalid: ${val}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
/**
|
||||
* Gets a `do` action object and returns a structured object,
|
||||
* where the action is the key and the parameter is the value.
|
||||
* Eg:
|
||||
@ -774,7 +684,7 @@ class TestRunner {
|
||||
* @param {object}
|
||||
* @returns {object}
|
||||
*/
|
||||
parseDo (action) {
|
||||
function parseDo (action) {
|
||||
return Object.keys(action).reduce((acc, val) => {
|
||||
switch (val) {
|
||||
case 'catch':
|
||||
@ -826,7 +736,6 @@ class TestRunner {
|
||||
|
||||
return newObj
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function parseDoError (err, spec) {
|
||||
@ -886,4 +795,83 @@ function getRandomInt (min, max) {
|
||||
return Math.floor(Math.random() * (max - min)) + min
|
||||
}
|
||||
|
||||
module.exports = TestRunner
|
||||
/**
|
||||
* Logs a skip
|
||||
* @param {object} the actions
|
||||
* @returns {TestRunner}
|
||||
*/
|
||||
function logSkip (action) {
|
||||
if (action.reason && action.version) {
|
||||
console.log(`Skip: ${action.reason} (${action.version})`)
|
||||
} else if (action.features) {
|
||||
console.log(`Skip: ${JSON.stringify(action.features)})`)
|
||||
} else {
|
||||
console.log('Skipped')
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decides if a test should be skipped
|
||||
* @param {object} the actions
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function shouldSkip (esVersion, action) {
|
||||
var shouldSkip = false
|
||||
// skip based on the version
|
||||
if (action.version) {
|
||||
if (action.version.trim() === 'all') return true
|
||||
const [min, max] = action.version.split('-').map(v => v.trim())
|
||||
// if both `min` and `max` are specified
|
||||
if (min && max) {
|
||||
shouldSkip = semver.satisfies(esVersion, action.version)
|
||||
// if only `min` is specified
|
||||
} else if (min) {
|
||||
shouldSkip = semver.gte(esVersion, min)
|
||||
// if only `max` is specified
|
||||
} else if (max) {
|
||||
shouldSkip = semver.lte(esVersion, max)
|
||||
// something went wrong!
|
||||
} else {
|
||||
throw new Error(`skip: Bad version range: ${action.version}`)
|
||||
}
|
||||
}
|
||||
|
||||
if (shouldSkip) return true
|
||||
|
||||
if (action.features) {
|
||||
if (!Array.isArray(action.features)) action.features = [action.features]
|
||||
// returns true if one of the features is not present in the supportedFeatures
|
||||
shouldSkip = !!action.features.filter(f => !~supportedFeatures.indexOf(f)).length
|
||||
}
|
||||
|
||||
if (shouldSkip) return true
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the array syntax of keys and values
|
||||
* eg: 'hits.hits.1.stuff' to 'hits.hits[1].stuff'
|
||||
* @param {object} the action to update
|
||||
* @returns {obj} the updated action
|
||||
*/
|
||||
// function updateArraySyntax (obj) {
|
||||
// const newObj = {}
|
||||
|
||||
// for (const key in obj) {
|
||||
// const newKey = key.replace(/\.\d{1,}\./g, v => `[${v.slice(1, -1)}].`)
|
||||
// const val = obj[key]
|
||||
|
||||
// if (typeof val === 'string') {
|
||||
// newObj[newKey] = val.replace(/\.\d{1,}\./g, v => `[${v.slice(1, -1)}].`)
|
||||
// } else if (val !== null && typeof val === 'object') {
|
||||
// newObj[newKey] = updateArraySyntax(val)
|
||||
// } else {
|
||||
// newObj[newKey] = val
|
||||
// }
|
||||
// }
|
||||
|
||||
// return newObj
|
||||
// }
|
||||
|
||||
module.exports = build
|
||||
|
||||
@ -851,3 +851,87 @@ test('Elastic cloud config', t => {
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
test('Opaque Id support', t => {
  // Verifies how the client maps the `opaqueId` request option (and the
  // client-level `opaqueIdPrefix`) onto the `X-Opaque-Id` HTTP header.

  t.test('No opaqueId', t => {
    t.plan(3)

    // No option given: the header must not be sent at all.
    function onRequest (req, res) {
      t.strictEqual(req.headers['x-opaque-id'], undefined)
      res.setHeader('Content-Type', 'application/json;utf=8')
      res.end(JSON.stringify({ hello: 'world' }))
    }

    buildServer(onRequest, ({ port }, server) => {
      const client = new Client({ node: `http://localhost:${port}` })

      client.search({ index: 'test', q: 'foo:bar' }, (err, { body }) => {
        t.error(err)
        t.deepEqual(body, { hello: 'world' })
        server.stop()
      })
    })
  })

  t.test('No prefix', t => {
    t.plan(3)

    // `opaqueId` alone: the header carries the value verbatim.
    function onRequest (req, res) {
      t.strictEqual(req.headers['x-opaque-id'], 'bar')
      res.setHeader('Content-Type', 'application/json;utf=8')
      res.end(JSON.stringify({ hello: 'world' }))
    }

    buildServer(onRequest, ({ port }, server) => {
      const client = new Client({ node: `http://localhost:${port}` })

      client.search(
        { index: 'test', q: 'foo:bar' },
        { opaqueId: 'bar' },
        (err, { body }) => {
          t.error(err)
          t.deepEqual(body, { hello: 'world' })
          server.stop()
        }
      )
    })
  })

  t.test('With prefix', t => {
    t.plan(3)

    // `opaqueIdPrefix` on the client is prepended to every opaqueId.
    function onRequest (req, res) {
      t.strictEqual(req.headers['x-opaque-id'], 'foo-bar')
      res.setHeader('Content-Type', 'application/json;utf=8')
      res.end(JSON.stringify({ hello: 'world' }))
    }

    buildServer(onRequest, ({ port }, server) => {
      const client = new Client({
        node: `http://localhost:${port}`,
        opaqueIdPrefix: 'foo-'
      })

      client.search(
        { index: 'test', q: 'foo:bar' },
        { opaqueId: 'bar' },
        (err, { body }) => {
          t.error(err)
          t.deepEqual(body, { hello: 'world' })
          server.stop()
        }
      )
    })
  })

  t.end()
})
|
||||
|
||||
@ -34,7 +34,6 @@ test('Should emit a request event when a request is performed', t => {
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Content-Length': '0'
|
||||
}
|
||||
},
|
||||
@ -86,7 +85,6 @@ test('Should emit a response event in case of a successful response', t => {
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Content-Length': '0'
|
||||
}
|
||||
},
|
||||
@ -136,7 +134,6 @@ test('Should emit a response event with the error set', t => {
|
||||
body: '',
|
||||
querystring: 'q=foo%3Abar',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Content-Length': '0'
|
||||
}
|
||||
},
|
||||
|
||||
@ -1813,6 +1813,55 @@ test('Compress request', t => {
|
||||
}
|
||||
})
|
||||
|
||||
t.test('Should skip the compression for empty strings/null/undefined', t => {
  t.plan(9)

  // With gzip compression enabled, a request whose body is '', null or
  // undefined must be sent with neither Content-Encoding nor Content-Type.
  function onRequest (req, res) {
    t.strictEqual(req.headers['content-encoding'], undefined)
    t.strictEqual(req.headers['content-type'], undefined)
    res.end()
  }

  buildServer(onRequest, ({ port }, server) => {
    const pool = new ConnectionPool({ Connection })
    pool.addConnection(`http://localhost:${port}`)

    const transport = new Transport({
      emit: () => {},
      connectionPool: pool,
      serializer: new Serializer(),
      maxRetries: 3,
      compression: 'gzip',
      requestTimeout: 30000,
      sniffInterval: false,
      sniffOnStart: false
    })

    // One request per empty-body variant, issued strictly in sequence.
    const requests = [
      { method: 'DELETE', path: '/hello', body: '' },
      { method: 'GET', path: '/hello', body: null },
      { method: 'GET', path: '/hello', body: undefined }
    ]

    function runNext (index) {
      transport.request(requests[index], (err, { body }) => {
        t.error(err)
        if (index + 1 < requests.length) {
          runNext(index + 1)
        } else {
          server.stop()
        }
      })
    }

    runNext(0)
  })
})
|
||||
|
||||
t.end()
|
||||
})
|
||||
|
||||
|
||||
Reference in New Issue
Block a user