Compare commits
10 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e7c40dd459 | |
| | 86361daea5 | |
| | 09c2ff8bf3 | |
| | 75d85cae19 | |
| | e30079f64d | |
| | 610bf851d2 | |
| | b91ff8a6cc | |
| | b39f29b2b0 | |
| | ef69bbd216 | |
| | 6194119e62 | |
.ci/Jenkinsfile (vendored, new file, 224 lines)
@@ -0,0 +1,224 @@
#!/usr/bin/env groovy

@Library('apm@current') _

def NODE_JS_VERSIONS = [8,10,12]
def nodeJsVersion = NODE_JS_VERSIONS[randomNumber(min: 0, max: 2)]

pipeline {
  agent {
    label 'docker && immutable'
  }

  environment {
    REPO = 'elasticsearch-js'
    BASE_DIR = "src/github.com/elastic/${env.REPO}"
    NODE_JS_DEFAULT_VERSION = "${nodeJsVersion}"
    NODE_JS_VERSIONS = "${NODE_JS_VERSIONS.join(',')}"
    HOME = "${env.WORKSPACE}"
    npm_config_cache = 'npm-cache'
  }

  options {
    timeout(time: 1, unit: 'HOURS')
    buildDiscarder(logRotator(numToKeepStr: '20', artifactNumToKeepStr: '20', daysToKeepStr: '30'))
    timestamps()
    ansiColor('xterm')
    disableResume()
    durabilityHint('PERFORMANCE_OPTIMIZED')
  }

  triggers {
    issueCommentTrigger('(?i).*(?:jenkins\\W+)?run\\W+(?:the\\W+)?tests(?:\\W+please)?.*')
    // env.CHANGE_ID is set only when the build comes from a pull request, which
    // means the daily cron job runs only for branches that don't have an active PR
    cron(env.CHANGE_ID ? '' : '@daily')
  }

  stages {
    stage('Checkout') {
      options { skipDefaultCheckout() }
      steps {
        deleteDir()
        gitCheckout(basedir: "${BASE_DIR}", githubNotifyFirstTimeContributor: false)
        stash allowEmpty: true, name: 'source', useDefaultExcludes: false
      }
    }

    stage('Install dependencies') {
      options { skipDefaultCheckout() }
      steps {
        deleteDir()
        unstash 'source'
        script {
          buildDockerImage(image: "node:${env.NODE_JS_DEFAULT_VERSION}-alpine").inside(){
            dir("${BASE_DIR}"){
              sh(label: 'System info', script: 'node --version; npm --version')
              sh(label: 'Install dependencies', script: 'npm install')
            }
          }
        }
        stash allowEmpty: true, name: 'source-dependencies', useDefaultExcludes: false
      }
    }

    stage('License check') {
      options { skipDefaultCheckout() }
      steps {
        withGithubNotify(context: 'License check') {
          deleteDir()
          unstash 'source-dependencies'
          script {
            buildDockerImage(image: "node:${env.NODE_JS_DEFAULT_VERSION}-alpine").inside(){
              dir("${BASE_DIR}"){
                sh(label: 'Check production dependencies licenses', script: 'npm run license-checker')
              }
            }
          }
        }
      }
    }

    stage('Linter') {
      options { skipDefaultCheckout() }
      steps {
        withGithubNotify(context: 'Linter') {
          deleteDir()
          unstash 'source-dependencies'
          script {
            buildDockerImage(image: "node:${env.NODE_JS_DEFAULT_VERSION}-alpine").inside(){
              dir("${BASE_DIR}"){
                sh(label: 'Lint code with standardjs', script: 'npm run lint')
              }
            }
          }
        }
      }
    }

    stage('Unit test') {
      failFast true
      options { skipDefaultCheckout() }
      steps {
        withGithubNotify(context: 'Unit test') {
          script {
            def versions = env.NODE_JS_VERSIONS.split(',')
            def parallelTasks = [:]
            versions.each{ version ->
              parallelTasks["Node.js v${version}"] = buildUnitTest(version: version)
            }
            parallel(parallelTasks)
          }
        }
      }
    }

    stage('Integration test') {
      failFast true
      options { skipDefaultCheckout() }
      parallel {
        stage('OSS') {
          agent { label 'docker && immutable' }
          options { skipDefaultCheckout() }
          environment {
            TEST_ES_SERVER = 'http://elasticsearch:9200'
          }
          steps {
            withGithubNotify(context: 'Integration test OSS') {
              deleteDir()
              unstash 'source-dependencies'
              dir("${BASE_DIR}"){
                // Sometimes the Docker registry fails with random timeouts;
                // this block retries pulling the image 3 times before failing.
                retry(3) {
                  sleep randomNumber(min: 5, max: 10)
                  sh(label: 'Start Elasticsearch', script: './scripts/es-docker.sh --detach')
                }
              }
              script {
                buildDockerImage(fromDockerfile: true).inside('--network=elastic'){
                  dir("${BASE_DIR}"){
                    sh(label: 'Integration test', script: 'npm run test:integration | tee test-integration.tap')
                    sh(label: 'Generating test reporting', script: './node_modules/.bin/tap-mocha-reporter xunit < test-integration.tap > junit-integration.xml')
                  }
                }
              }
              sh(label: 'Stop Elasticsearch', script: 'docker kill $(docker ps -q)')
              junit(allowEmptyResults: true, keepLongStdio: true, testResults: "${BASE_DIR}/**/junit-*.xml")
            }
          }
        }

        stage('xPack') {
          agent { label 'docker && immutable' }
          options { skipDefaultCheckout() }
          environment {
            TEST_ES_SERVER = 'https://elastic:changeme@elasticsearch:9200'
          }
          steps {
            withGithubNotify(context: 'Integration test xPack') {
              deleteDir()
              unstash 'source-dependencies'
              dir("${BASE_DIR}"){
                // Sometimes the Docker registry fails with random timeouts;
                // this block retries pulling the image 3 times before failing.
                retry(3) {
                  sleep randomNumber(min: 5, max: 10)
                  sh(label: 'Start Elasticsearch', script: './scripts/es-docker-platinum.sh --detach')
                }
              }
              script {
                buildDockerImage(fromDockerfile: true).inside('--network=elastic'){
                  dir("${BASE_DIR}"){
                    sh(label: 'Integration test', script: 'npm run test:integration | tee test-integration.tap')
                    sh(label: 'Generating test reporting', script: './node_modules/.bin/tap-mocha-reporter xunit < test-integration.tap > junit-integration.xml')
                  }
                }
              }
              sh(label: 'Stop Elasticsearch', script: 'docker kill $(docker ps -q)')
              junit(allowEmptyResults: true, keepLongStdio: true, testResults: "${BASE_DIR}/**/junit-*.xml")
            }
          }
        }
      }
    }
  }
}

// Sometimes the Docker registry fails with random timeouts;
// this function retries building or pulling the image 3 times before failing.
def buildDockerImage(args) {
  def image
  retry(3) {
    sleep randomNumber(min: 5, max: 10)
    if (args.fromDockerfile == true) {
      image = docker.build('nodejs-image', "--build-arg NODE_JS_VERSION=${env.NODE_JS_DEFAULT_VERSION} ${BASE_DIR}/.ci/docker")
    } else {
      image = docker.image(args.image)
      // make sure we have the latest available from Docker Hub
      image.pull()
    }
  }
  return image
}

def buildUnitTest(args) {
  return {
    node('docker && immutable') {
      deleteDir()
      unstash 'source'
      script {
        buildDockerImage(image: "node:${args.version}-alpine").inside(){
          dir("${BASE_DIR}"){
            sh(label: 'Install dependencies', script: 'npm install')
            sh(label: 'Run unit test', script: 'npm run test:unit | tee test-unit.tap')
            sh(label: 'Run behavior test', script: 'npm run test:behavior | tee test-behavior.tap')
            sh(label: 'Run types test', script: 'npm run test:types')
            sh(label: 'Generating test reporting', script: './node_modules/.bin/tap-mocha-reporter xunit < test-unit.tap > junit-unit.xml; ./node_modules/.bin/tap-mocha-reporter xunit < test-behavior.tap > junit-behavior.xml')
          }
        }
      }
      junit(allowEmptyResults: true, keepLongStdio: true, testResults: "${BASE_DIR}/**/junit-*.xml")
    }
  }
}
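Each integration stage above exports TEST_ES_SERVER pointing at the containerized cluster: plain HTTP for the OSS image, basic-auth HTTPS for xPack. As a hedged sketch of how a test harness might consume that variable (the variable name comes from the Jenkinsfile; the consuming code is illustrative, not the repository's actual test runner):

----
'use strict'

const { Client } = require('@elastic/elasticsearch')

// TEST_ES_SERVER is exported by the pipeline's environment blocks, e.g.
// 'http://elasticsearch:9200' or 'https://elastic:changeme@elasticsearch:9200'.
const node = process.env.TEST_ES_SERVER || 'http://localhost:9200'
const client = new Client({ node })

// Ping the cluster and print its version, just to prove connectivity.
client.info()
  .then(({ body }) => console.log(body.version.number))
  .catch(console.log)
----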
.ci/docker/Dockerfile (new file, 7 lines)
@@ -0,0 +1,7 @@
ARG NODE_JS_VERSION=10
FROM node:${NODE_JS_VERSION}-alpine

RUN apk --no-cache add git

# Create app directory
WORKDIR /usr/src/app
@@ -1,70 +0,0 @@
---

##### GLOBAL METADATA

- meta:
    cluster: clients-ci

##### JOB DEFAULTS

- job:
    project-type: matrix
    logrotate:
      daysToKeep: 30
      numToKeep: 100
    properties:
      - github:
          url: https://github.com/elastic/elasticsearch-js/
      - inject:
          properties-content: HOME=$JENKINS_HOME
    concurrent: true
    node: flyweight
    scm:
      - git:
          name: origin
          credentials-id: f6c7695a-671e-4f4f-a331-acdce44ff9ba
          reference-repo: /var/lib/jenkins/.git-references/elasticsearch-js.git
          branches:
            - ${branch_specifier}
          url: https://github.com/elastic/elasticsearch-js.git
          wipe-workspace: 'True'
    triggers:
      - github
    vault:
      # vault read auth/approle/role/clients-ci/role-id
      role_id: ddbd0d44-0e51-105b-177a-c8fdfd445126
    axes:
      - axis:
          type: slave
          name: label
          values:
            - linux
      - axis:
          type: yaml
          filename: .ci/test-matrix.yml
          name: ELASTICSEARCH_VERSION
      - axis:
          type: yaml
          filename: .ci/test-matrix.yml
          name: NODE_JS_VERSION
    yaml-strategy:
      exclude-key: exclude
      filename: .ci/test-matrix.yml
    wrappers:
      - ansicolor
      - timeout:
          type: absolute
          timeout: 120
          fail: true
      - timestamps
      - workspace-cleanup
    builders:
      - shell: |-
          #!/usr/local/bin/runbld
          .ci/run-tests
    publishers:
      - email:
          recipients: infra-root+build@elastic.co
      # - junit:
      #     results: "*-junit.xml"
      #     allow-empty-results: true
@@ -1,14 +0,0 @@
---
- job:
    name: elastic+elasticsearch-js+5.x
    display-name: 'elastic / elasticsearch-js # 5.x'
    description: Testing the elasticsearch-js 5.x branch.
    parameters:
      - string:
          name: branch_specifier
          default: refs/heads/5.x
          description: the Git branch specifier to build (<branchName>, <tagName>,
            <commitId>, etc.)
    triggers:
      - github
      - timed: '@weekly'
@@ -1,14 +0,0 @@
---
- job:
    name: elastic+elasticsearch-js+6.x
    display-name: 'elastic / elasticsearch-js # 6.x'
    description: Testing the elasticsearch-js 6.x branch.
    parameters:
      - string:
          name: branch_specifier
          default: refs/heads/6.x
          description: the Git branch specifier to build (<branchName>, <tagName>,
            <commitId>, etc.)
    triggers:
      - github
      - timed: '@weekly'
@@ -1,14 +0,0 @@
---
- job:
    name: elastic+elasticsearch-js+7.x
    display-name: 'elastic / elasticsearch-js # 7.x'
    description: Testing the elasticsearch-js 7.x branch.
    parameters:
      - string:
          name: branch_specifier
          default: refs/heads/7.x
          description: the Git branch specifier to build (<branchName>, <tagName>,
            <commitId>, etc.)
    triggers:
      - github
      - timed: '@weekly'
@@ -1,14 +0,0 @@
---
- job:
    name: elastic+elasticsearch-js+master
    display-name: 'elastic / elasticsearch-js # master'
    description: Testing the elasticsearch-js master branch.
    parameters:
      - string:
          name: branch_specifier
          default: refs/heads/master
          description: the Git branch specifier to build (<branchName>, <tagName>,
            <commitId>, etc.)
    triggers:
      - github
      - timed: '@daily'
.ci/jobs/elastic+elasticsearch-js+mbp.yml (new file, 55 lines)
@@ -0,0 +1,55 @@
---
- job:
    name: elastic+elasticsearch-js+mbp
    display-name: 'elastic / elasticsearch-js'
    description: Testing elasticsearch-js.
    project-type: multibranch
    properties: []
    triggers: []
    logrotate:
      daysToKeep: 30
      number-to-keep: '5'
      days-to-keep: '1'
    concurrent: true
    node: linux
    script-path: .ci/Jenkinsfile
    scm:
      - github:
          branch-discovery: no-pr
          discover-pr-forks-strategy: merge-current
          discover-pr-forks-trust: permission
          discover-pr-origin: merge-current
          discover-tags: false
          repo: elasticsearch-js
          repo-owner: elastic
          credentials-id: 2a9602aa-ab9f-4e52-baf3-b71ca88469c7-UserAndToken
          ssh-checkout:
            credentials: f6c7695a-671e-4f4f-a331-acdce44ff9ba
          build-strategies:
            - tags:
                ignore-tags-older-than: -1
                ignore-tags-newer-than: -1
            - regular-branches: true
            - change-request:
                ignore-target-only-changes: false
          clean:
            after: true
            before: true
          prune: true
          shallow-clone: true
          depth: 3
          do-not-fetch-tags: true
          submodule:
            disable: false
            recursive: true
            parent-credentials: true
            timeout: 100
          timeout: '15'
          use-author: true
          wipe-workspace: 'True'
    periodic-folder-trigger: 1d
    prune-dead-branches: true
    publishers:
      - email:
          recipients: infra-root+build@elastic.co
@@ -1,19 +0,0 @@
---
- job:
    name: elastic+elasticsearch-js+pull-request
    display-name: 'elastic / elasticsearch-js # pull-request'
    description: Testing of elasticsearch-js pull requests.
    scm:
      - git:
          branches:
            - ${ghprbActualCommit}
          refspec: +refs/pull/*:refs/remotes/origin/pr/*
    triggers:
      - github-pull-request:
          org-list:
            - elastic
          allow-whitelist-orgs-as-admins: true
          github-hooks: true
          status-context: clients-ci
          cancel-builds-on-update: true
    publishers: []
.ci/packer_cache.sh (new file, 14 lines)
@@ -0,0 +1,14 @@
#!/usr/bin/env bash

source /usr/local/bin/bash_standard_lib.sh

DOCKER_IMAGES="node:12-alpine
node:10-alpine
node:8-alpine
"

for di in ${DOCKER_IMAGES}
do
  (retry 2 docker pull "${di}") || echo "Error pulling ${di} Docker image, we continue"
done
@@ -5,6 +5,9 @@ node_js:
- "10"
- "8"

+cache:
+  npm: false
+
os:
- windows
- linux
@@ -14,7 +17,9 @@ install:

script:
- if [ "$TRAVIS_OS_NAME" = "linux" ]; then npm run license-checker; fi
- npm run test
- npm run lint
- npm run test:coverage
- npm run test:types

notifications:
  email:
@@ -86,7 +86,7 @@ _Default:_ `false`
_Default:_ `false`

|`sniffEndpoint`
-|`string` - Max request timeout for each request. +
+|`string` - Endpoint to ping during a sniff. +
_Default:_ `'_nodes/_all/http'`

|`sniffOnConnectionFault`
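For context, a minimal client configuration exercising the corrected option; this is a hypothetical usage sketch, with both values set to their documented defaults and an illustrative node URL:

----
'use strict'

const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  // endpoint to ping during a sniff, as documented above
  sniffEndpoint: '_nodes/_all/http',
  // whether a failed connection should trigger a sniff
  sniffOnConnectionFault: false
})
----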
@@ -8,57 +8,83 @@ This can greatly increase the indexing speed.
----
'use strict'

+require('array.prototype.flatmap').shim()
const { Client } = require('@elastic/elasticsearch')
-const client = new Client({ node: 'http://localhost:9200' })
+const client = new Client({
+  node: 'http://localhost:9200'
+})

async function run () {
-  const { body: bulkResponse } = await client.bulk({
-    // here we are forcing an index refresh,
-    // otherwise we will not get any result
-    // in the consequent search
-    refresh: true,
-    body: [
-      // operation to perform
-      { index: { _index: 'game-of-thrones' } },
-      // the document to index
-      {
-        character: 'Ned Stark',
-        quote: 'Winter is coming.'
-      },
-
-      { index: { _index: 'game-of-thrones' } },
-      {
-        character: 'Daenerys Targaryen',
-        quote: 'I am the blood of the dragon.'
-      },
-
-      { index: { _index: 'game-of-thrones' } },
-      {
-        character: 'Tyrion Lannister',
-        quote: 'A mind needs books like a sword needs a whetstone.'
-      }
-    ]
-  })
-
-  if (bulkResponse.errors) {
-    console.log(bulkResponse)
-    process.exit(1)
-  }
-
-  // Let's search!
-  const { body } = await client.search({
-    index: 'game-of-thrones',
+  await client.indices.create({
+    index: 'tweets',
    body: {
-      query: {
-        match: {
-          quote: 'winter'
+      mappings: {
+        properties: {
+          id: { type: 'integer' },
+          text: { type: 'text' },
+          user: { type: 'keyword' },
+          time: { type: 'date' }
        }
      }
    }
-  })
+  }, { ignore: [400] })

-  console.log(body.hits.hits)
+  const dataset = [{
+    id: 1,
+    text: 'If I fall, don\'t bring me back.',
+    user: 'jon',
+    date: new Date()
+  }, {
+    id: 2,
+    text: 'Winter is coming',
+    user: 'ned',
+    date: new Date()
+  }, {
+    id: 3,
+    text: 'A Lannister always pays his debts.',
+    user: 'tyrion',
+    date: new Date()
+  }, {
+    id: 4,
+    text: 'I am the blood of the dragon.',
+    user: 'daenerys',
+    date: new Date()
+  }, {
+    id: 5, // change this value to a string to see the bulk response with errors
+    text: 'A girl is Arya Stark of Winterfell. And I\'m going home.',
+    user: 'arya',
+    date: new Date()
+  }]
+
+  const body = dataset.flatMap(doc => [{ index: { _index: 'tweets' } }, doc])
+
+  const { body: bulkResponse } = await client.bulk({ refresh: true, body })
+
+  if (bulkResponse.errors) {
+    const erroredDocuments = []
+    // The items array has the same order as the dataset we just indexed.
+    // The presence of the `error` key indicates that the operation
+    // that we did for the document has failed.
+    bulkResponse.items.forEach((action, i) => {
+      const operation = Object.keys(action)[0]
+      if (action[operation].error) {
+        erroredDocuments.push({
+          // If the status is 429 it means that you can retry the document,
+          // otherwise it's very likely a mapping error, and you should
+          // fix the document before trying it again.
+          status: action[operation].status,
+          error: action[operation].error,
+          operation: body[i * 2],
+          document: body[i * 2 + 1]
+        })
+      }
+    })
+    console.log(erroredDocuments)
+  }
+
+  const { body: count } = await client.count({ index: 'tweets' })
+  console.log(count)
}

run().catch(console.log)
----
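The status 429 comment suggests that throttled documents can simply be resent. A hedged sketch of that follow-up step, reusing the erroredDocuments entries collected above (this retry is illustrative, not part of the documented example, and would run inside run() after the errors check):

----
// Resend only the documents rejected with 429 (too many requests);
// other statuses usually mean the document or mapping must be fixed first.
const retryable = erroredDocuments.filter(doc => doc.status === 429)
if (retryable.length > 0) {
  const retryBody = retryable.flatMap(doc => [doc.operation, doc.document])
  await client.bulk({ refresh: true, body: retryBody })
}
----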
lib/Connection.d.ts (vendored, 2 additions)
@@ -25,6 +25,8 @@ interface ConnectionOptions {

interface RequestOptions extends http.ClientRequestArgs {
  asStream?: boolean;
  body?: any;
  querystring?: string;
}

export interface AgentOptions {
@@ -212,9 +212,9 @@ class BaseConnectionPool {
        url: new URL(address),
        id: ids[i],
        roles: Object.assign({
-         [Connection.roles.MASTER]: true,
-         [Connection.roles.DATA]: true,
-         [Connection.roles.INGEST]: true,
+         [Connection.roles.MASTER]: false,
+         [Connection.roles.DATA]: false,
+         [Connection.roles.INGEST]: false,
          [Connection.roles.ML]: false
        }, roles)
      })
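The flipped defaults change what an unreported role means: it now resolves to false instead of true. A small sketch of the Object.assign merge semantics, with plain keys standing in for the Connection.roles constants:

----
// Pessimistic defaults, as in the patched code above.
const defaults = { master: false, data: false, ingest: false, ml: false }

// A node that reports only the 'data' role.
const reported = { data: true }

// Reported roles override the defaults, so anything the node does not
// claim stays false (previously it would have defaulted to true).
console.log(Object.assign({}, defaults, reported))
// => { master: false, data: true, ingest: false, ml: false }
----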
@@ -4,7 +4,7 @@
  "main": "index.js",
  "types": "index.d.ts",
  "homepage": "http://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/index.html",
- "version": "6.8.2",
+ "version": "6.8.3",
  "keywords": [
    "elasticsearch",
    "elastic",
@@ -20,13 +20,14 @@
    "test:unit": "tap test/unit/*.test.js -t 300 --no-coverage",
    "test:behavior": "tap test/behavior/*.test.js -t 300 --no-coverage",
    "test:integration": "tap test/integration/index.js -T --no-coverage",
    "test:integration:report": "npm run test:integration | tap-mocha-reporter xunit > $WORKSPACE/test-report-junit.xml",
    "test:types": "tsc --project ./test/types/tsconfig.json",
    "test:coverage": "nyc tap test/unit/*.test.js test/behavior/*.test.js -t 300 && nyc report --reporter=text-lcov > coverage.lcov && codecov",
    "lint": "standard",
    "lint:fix": "standard --fix",
    "ci": "npm run license-checker && npm test && npm run test:integration && npm run test:coverage",
-   "license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause'"
+   "license-checker": "license-checker --production --onlyAllow='MIT;Apache-2.0;Apache1.1;ISC;BSD-3-Clause;BSD-2-Clause'",
+   "elasticsearch": "./scripts/es-docker.sh",
+   "elasticsearch:xpack": "./scripts/es-docker-platinum.sh"
  },
  "author": {
    "name": "Tomas Della Vedova",
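With these two scripts in place, contributors can start a disposable local test cluster with `npm run elasticsearch`, or `npm run elasticsearch:xpack` for the security-enabled image used by the xPack integration stage.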
@@ -6,31 +6,66 @@ testnodekey="/.ci/certs/testnode.key"
cacrt="/.ci/certs/ca.crt"

# pass `--clean` to remove the old snapshot
-if [ "$1" != "" ]; then
+if [ "$1" == "--clean" ]; then
  docker rmi $(docker images --format '{{.Repository}}:{{.Tag}}' | grep '8.0.0-SNAPSHOT')
fi

-exec docker run \
-  --rm \
-  -e "node.attr.testattr=test" \
-  -e "path.repo=/tmp" \
-  -e "repositories.url.allowed_urls=http://snapshot.*" \
-  -e "discovery.type=single-node" \
-  -e "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
-  -e "ELASTIC_PASSWORD=changeme" \
-  -e "xpack.security.enabled=true" \
-  -e "xpack.license.self_generated.type=trial" \
-  -e "xpack.security.http.ssl.enabled=true" \
-  -e "xpack.security.http.ssl.verification_mode=certificate" \
-  -e "xpack.security.http.ssl.key=certs/testnode.key" \
-  -e "xpack.security.http.ssl.certificate=certs/testnode.crt" \
-  -e "xpack.security.http.ssl.certificate_authorities=certs/ca.crt" \
-  -e "xpack.security.transport.ssl.enabled=true" \
-  -e "xpack.security.transport.ssl.key=certs/testnode.key" \
-  -e "xpack.security.transport.ssl.certificate=certs/testnode.crt" \
-  -e "xpack.security.transport.ssl.certificate_authorities=certs/ca.crt" \
-  -v "$repo$testnodecrt:/usr/share/elasticsearch/config/certs/testnode.crt" \
-  -v "$repo$testnodekey:/usr/share/elasticsearch/config/certs/testnode.key" \
-  -v "$repo$cacrt:/usr/share/elasticsearch/config/certs/ca.crt" \
-  -p 9200:9200 \
-  docker.elastic.co/elasticsearch/elasticsearch:6.7.1
+# Create the 'elastic' network if it doesn't exist
+exec docker network ls | grep elastic > /dev/null || docker network create elastic > /dev/null
+
+if [ "$1" == "--detach" ]; then
+  exec docker run \
+    --rm \
+    -e "node.attr.testattr=test" \
+    -e "path.repo=/tmp" \
+    -e "repositories.url.allowed_urls=http://snapshot.*" \
+    -e "discovery.type=single-node" \
+    -e "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
+    -e "ELASTIC_PASSWORD=changeme" \
+    -e "xpack.security.enabled=true" \
+    -e "xpack.license.self_generated.type=trial" \
+    -e "xpack.security.http.ssl.enabled=true" \
+    -e "xpack.security.http.ssl.verification_mode=certificate" \
+    -e "xpack.security.http.ssl.key=certs/testnode.key" \
+    -e "xpack.security.http.ssl.certificate=certs/testnode.crt" \
+    -e "xpack.security.http.ssl.certificate_authorities=certs/ca.crt" \
+    -e "xpack.security.transport.ssl.enabled=true" \
+    -e "xpack.security.transport.ssl.key=certs/testnode.key" \
+    -e "xpack.security.transport.ssl.certificate=certs/testnode.crt" \
+    -e "xpack.security.transport.ssl.certificate_authorities=certs/ca.crt" \
+    -v "$repo$testnodecrt:/usr/share/elasticsearch/config/certs/testnode.crt" \
+    -v "$repo$testnodekey:/usr/share/elasticsearch/config/certs/testnode.key" \
+    -v "$repo$cacrt:/usr/share/elasticsearch/config/certs/ca.crt" \
+    -p 9200:9200 \
+    --detach \
+    --network=elastic \
+    --name=elasticsearch \
+    docker.elastic.co/elasticsearch/elasticsearch:6.8.2
+else
+  exec docker run \
+    --rm \
+    -e "node.attr.testattr=test" \
+    -e "path.repo=/tmp" \
+    -e "repositories.url.allowed_urls=http://snapshot.*" \
+    -e "discovery.type=single-node" \
+    -e "ES_JAVA_OPTS=-Xms1g -Xmx1g" \
+    -e "ELASTIC_PASSWORD=changeme" \
+    -e "xpack.security.enabled=true" \
+    -e "xpack.license.self_generated.type=trial" \
+    -e "xpack.security.http.ssl.enabled=true" \
+    -e "xpack.security.http.ssl.verification_mode=certificate" \
+    -e "xpack.security.http.ssl.key=certs/testnode.key" \
+    -e "xpack.security.http.ssl.certificate=certs/testnode.crt" \
+    -e "xpack.security.http.ssl.certificate_authorities=certs/ca.crt" \
+    -e "xpack.security.transport.ssl.enabled=true" \
+    -e "xpack.security.transport.ssl.key=certs/testnode.key" \
+    -e "xpack.security.transport.ssl.certificate=certs/testnode.crt" \
+    -e "xpack.security.transport.ssl.certificate_authorities=certs/ca.crt" \
+    -v "$repo$testnodecrt:/usr/share/elasticsearch/config/certs/testnode.crt" \
+    -v "$repo$testnodekey:/usr/share/elasticsearch/config/certs/testnode.key" \
+    -v "$repo$cacrt:/usr/share/elasticsearch/config/certs/ca.crt" \
+    -p 9200:9200 \
+    --network=elastic \
+    --name=elasticsearch \
+    docker.elastic.co/elasticsearch/elasticsearch:6.8.2
+fi
@@ -1,12 +1,38 @@
#!/bin/bash

-exec docker run \
-  --rm \
-  -e "node.attr.testattr=test" \
-  -e "path.repo=/tmp" \
-  -e "repositories.url.allowed_urls=http://snapshot.*" \
-  -e "discovery.type=single-node" \
-  -p 9200:9200 \
-  --network=elastic \
-  --name=elasticsearch \
-  docker.elastic.co/elasticsearch/elasticsearch:6.7.1
+# Images are cached locally; it may be necessary
+# to delete an old image and download the
+# latest snapshot again.
+
+# pass `--clean` to remove the old snapshot
+if [ "$1" == "--clean" ]; then
+  docker rmi $(docker images --format '{{.Repository}}:{{.Tag}}' | grep '8.0.0-SNAPSHOT')
+fi
+
+# Create the 'elastic' network if it doesn't exist
+exec docker network ls | grep elastic > /dev/null || docker network create elastic > /dev/null
+
+if [ "$1" == "--detach" ]; then
+  exec docker run \
+    --rm \
+    -e "node.attr.testattr=test" \
+    -e "path.repo=/tmp" \
+    -e "repositories.url.allowed_urls=http://snapshot.*" \
+    -e "discovery.type=single-node" \
+    -p 9200:9200 \
+    --detach \
+    --network=elastic \
+    --name=elasticsearch \
+    docker.elastic.co/elasticsearch/elasticsearch:6.8.2
+else
+  exec docker run \
+    --rm \
+    -e "node.attr.testattr=test" \
+    -e "path.repo=/tmp" \
+    -e "repositories.url.allowed_urls=http://snapshot.*" \
+    -e "discovery.type=single-node" \
+    -p 9200:9200 \
+    --network=elastic \
+    --name=elasticsearch \
+    docker.elastic.co/elasticsearch/elasticsearch:6.8.2
+fi
@@ -474,6 +474,45 @@ test('API', t => {
    t.end()
  })

  t.test('Should map roles', t => {
    const pool = new ConnectionPool({ Connection })
    const nodes = {
      a1: {
        http: {
          publish_address: 'example.com:9200'
        },
        roles: ['master', 'data', 'ingest', 'ml']
      },
      a2: {
        http: {
          publish_address: 'example.com:9201'
        },
        roles: []
      }
    }
    t.same(pool.nodesToHost(nodes, 'http:'), [{
      url: new URL('http://example.com:9200'),
      id: 'a1',
      roles: {
        master: true,
        data: true,
        ingest: true,
        ml: true
      }
    }, {
      url: new URL('http://example.com:9201'),
      id: 'a2',
      roles: {
        master: false,
        data: false,
        ingest: false,
        ml: false
      }
    }])

    t.end()
  })

  t.end()
})