Compare commits
3 Commits
v9.0.0-alp
...
drop-body-
| Author | SHA1 | Date | |
|---|---|---|---|
| 70aae3b44f | |||
| 444975b4e6 | |||
| 341168d2a1 |
54
scripts/codemod/drop-body.test.ts
Normal file
54
scripts/codemod/drop-body.test.ts
Normal file
@ -0,0 +1,54 @@
|
||||
// Fixture for the drop-body codemod. Each client call below exercises a
// different shape of `body` usage the codemod must detect; the inline
// "should get fixed by codemod" comments mark the call sites it must rewrite.
// NOTE(review): code here is deliberate test input — do not "clean it up".
import { Client } from '../..'

const client = new Client({
  node: 'http://localhost:9200',
  auth: { username: 'elastic', password: 'changeme' }
})

async function doThings () {
  // case: body as an inline object literal
  // should get fixed by codemod
  await client.closePointInTime({
    body: {
      id: 'foobar'
    }
  })

  await client.asyncSearch.get({
    // @ts-expect-error should get fixed by codemod
    body: {
      id: 'foo'
    }
  })

  // case: body alongside other request params
  // @ts-expect-error should get fixed by codemod
  await client.create({
    id: 'foo',
    body: { index: 'my-index' }
  })

  // case: no body at all — codemod must leave this call untouched
  await client.watcher.putWatch({
    id: 'foo',
    active: true
  })

  // case: shorthand property and indirection through a local variable
  const body = { id: 'foo' }
  // @ts-expect-error should get fixed by codemod
  await client.asyncSearch.get({ body })
  await client.asyncSearch.get(body)

  // case: body reached through a wrapper object
  const request = { body }
  // @ts-expect-error should get fixed by codemod
  await client.asyncSearch.get(request)

  // case: aliased variable — requires following the flow graph
  const request2 = body
  await client.closePointInTime(request2)

  // some non-client calls
  const x = Math.random()
  console.log(x)
  console.log({ body: 'foo' })
}

doThings()
  .then(() => console.log('done'))
  .catch(() => console.error('uh oh'))
|
||||
172
scripts/codemod/drop-body.ts
Normal file
172
scripts/codemod/drop-body.ts
Normal file
@ -0,0 +1,172 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
import ts from 'typescript'
import path from 'node:path'
import minimist from 'minimist'

// Elasticsearch client API names used to duck-type `Client` instances:
// an identifier is only treated as a client when ALL of these names exist
// as properties on its type (see `isClient` below).
// NOTE(review): the list stops at 'fieldCaps' — confirm it is intentionally
// a representative subset rather than the full alphabetical API list.
const apis = [
  'asyncSearch',
  'autoscaling',
  'bulk',
  'capabilities',
  'cat',
  'ccr',
  'clearScroll',
  'closePointInTime',
  'cluster',
  'connector',
  'count',
  'create',
  'danglingIndices',
  'delete',
  'deleteByQuery',
  'deleteByQueryRethrottle',
  'deleteScript',
  'enrich',
  'eql',
  'esql',
  'exists',
  'existsSource',
  'explain',
  'features',
  'fieldCaps',
]
|
||||
|
||||
/**
|
||||
* Detects whether a node is a `Client` instance identifier
|
||||
* @remarks Uses duck-typing by checking that several Elasticsearch APIs exist as members on the identifier
|
||||
*/
|
||||
function isClient(node: ts.Identifier) {
|
||||
const type = checker.getTypeAtLocation(node)
|
||||
const properties = type.getProperties().map(prop => prop.escapedName.toString())
|
||||
|
||||
for (const api of apis) {
|
||||
if (!properties.includes(api)) return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the call expression node is running a client API function, otherwise false
|
||||
*/
|
||||
function isClientExpression(node: ts.CallExpression): boolean {
|
||||
let flag = false
|
||||
function visitIdentifiers(node: ts.Node) {
|
||||
if (ts.isIdentifier(node) && isClient(node)) {
|
||||
flag = true
|
||||
return
|
||||
}
|
||||
ts.forEachChild(node, visitIdentifiers)
|
||||
}
|
||||
visitIdentifiers(node)
|
||||
return flag
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array of all call expressions to `Client` functions
|
||||
*/
|
||||
function collectClientCallExpressions(node: ts.SourceFile): ts.CallExpression[] {
|
||||
const clientExpressions: ts.CallExpression[] = []
|
||||
|
||||
// recurse through all child nodes looking for `Client` call expressions
|
||||
function collect(node: ts.Node) {
|
||||
if (ts.isCallExpression(node)) {
|
||||
// look for client identifier
|
||||
if (isClientExpression(node)) {
|
||||
clientExpressions.push(node)
|
||||
}
|
||||
}
|
||||
|
||||
ts.forEachChild(node, collect)
|
||||
}
|
||||
|
||||
ts.forEachChild(node, collect)
|
||||
|
||||
return clientExpressions
|
||||
}
|
||||
|
||||
function fixBodyProp(sourceFile: ts.SourceFile, node: ts.Node) {
|
||||
if (ts.isObjectLiteralExpression(node)) {
|
||||
// @ts-expect-error need to cast `prop` to a more specific type
|
||||
const prop = node.properties.find(prop => prop.name.escapedText === 'body')
|
||||
if (prop != null) {
|
||||
console.log('// needs fix:')
|
||||
console.log(sourceFile?.text.slice(node.pos, node.end))
|
||||
|
||||
// TODO: fix { body: value }
|
||||
// TODO: fix { body: { ... } }
|
||||
// TODO: fix { body }
|
||||
}
|
||||
} else if (ts.isIdentifier(node)) {
|
||||
// @ts-expect-error
|
||||
if (node.flowNode.antecedent?.node != null) {
|
||||
// @ts-expect-error
|
||||
fixBodyProp(sourceFile, node.flowNode.antecedent.node)
|
||||
} else {
|
||||
// console.log('uh oh')
|
||||
// console.log(sourceFile?.text.slice(node.pos, node.end))
|
||||
}
|
||||
} else {
|
||||
// @ts-expect-error
|
||||
if (node.flowNode?.antecedent?.node != null) {
|
||||
// console.log('two')
|
||||
// @ts-expect-error
|
||||
fixBodyProp(sourceFile, node.flowNode.antecedent.node)
|
||||
} else {
|
||||
// console.log('something else')
|
||||
// console.log(node.kind)
|
||||
// console.log(sourceFile?.text.slice(node.pos, node.end))
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
function lookForBodyProp(sourceFile: ts.SourceFile, node: ts.CallExpression) {
|
||||
if (node.arguments.length === 0) return
|
||||
const first = node.arguments[0]
|
||||
fixBodyProp(sourceFile, first)
|
||||
}
|
||||
|
||||
// build TS project from provided file names
const args = minimist(process.argv.slice(2))
const cwd = process.cwd()
// positional CLI arguments are interpreted as file paths relative to the cwd
const files = args._.map(file => path.join(cwd, file))
const program = ts.createProgram(files, {})
// module-scoped on purpose: `isClient` above reads this checker
const checker = program.getTypeChecker()

let processed = 0
program.getSourceFiles().forEach(sourceFile => {
  // skip files pulled in from node_modules / lib.d.ts
  if (program.isSourceFileFromExternalLibrary(sourceFile)) return
  const { fileName } = sourceFile

  try {
    // get all `Client` call expressions
    const exprs = collectClientCallExpressions(sourceFile)
    if (exprs.length > 0) {
      console.log(`found ${exprs.length} Client expressions in ${fileName}`)
    }
    // for each call expression, get the first function argument, determine if it's an object and whether it has a `body` key
    exprs.forEach(expr => lookForBodyProp(sourceFile, expr))
  } catch (e) {
    // continue: one unprocessable file should not abort the whole scan
    console.error(`Could not process ${fileName}: ${e}`)
  }
  processed++
})
console.log(`Done scanning ${processed} files`)
|
||||
@ -1,143 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { join } = require('path')
|
||||
const { readdirSync, writeFileSync, readFileSync } = require('fs')
|
||||
const minimist = require('minimist')
|
||||
const ora = require('ora')
|
||||
const rimraf = require('rimraf')
|
||||
const standard = require('standard')
|
||||
const downloadArtifacts = require('./download-artifacts')
|
||||
const {
|
||||
generate,
|
||||
genFactory,
|
||||
generateDocs,
|
||||
generateRequestTypes
|
||||
} = require('./utils')
|
||||
|
||||
start(minimist(process.argv.slice(2), {
|
||||
string: ['version', 'hash']
|
||||
}))
|
||||
|
||||
function start (opts) {
|
||||
if (opts.version == null) {
|
||||
console.error('Missing version parameter')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
const packageFolder = join(__dirname, '..', 'api')
|
||||
const apiOutputFolder = join(packageFolder, 'api')
|
||||
const mainOutputFile = join(packageFolder, 'index.js')
|
||||
const docOutputFile = join(__dirname, '..', 'docs', 'reference.asciidoc')
|
||||
const typeDefFile = join(__dirname, '..', 'index.d.ts')
|
||||
const requestParamsOutputFile = join(packageFolder, 'requestParams.d.ts')
|
||||
|
||||
let log
|
||||
downloadArtifacts({ version: opts.version, hash: opts.hash })
|
||||
.then(onArtifactsDownloaded)
|
||||
.catch(err => {
|
||||
console.log(err)
|
||||
process.exit(1)
|
||||
})
|
||||
|
||||
function onArtifactsDownloaded () {
|
||||
log = ora('Generating APIs').start()
|
||||
|
||||
log.text = 'Cleaning API folder...'
|
||||
rimraf.sync(join(apiOutputFolder, '*.js'))
|
||||
|
||||
const allSpec = readdirSync(downloadArtifacts.locations.specFolder)
|
||||
.filter(file => file !== '_common.json')
|
||||
.filter(file => !file.includes('deprecated'))
|
||||
.sort()
|
||||
.map(file => require(join(downloadArtifacts.locations.specFolder, file)))
|
||||
|
||||
const namespaces = namespacify(readdirSync(downloadArtifacts.locations.specFolder))
|
||||
for (const namespace in namespaces) {
|
||||
if (namespace === '_common') continue
|
||||
const code = generate(namespace, namespaces[namespace], downloadArtifacts.locations.specFolder, opts.version)
|
||||
const filePath = join(apiOutputFolder, `${namespace}.js`)
|
||||
writeFileSync(filePath, code, { encoding: 'utf8' })
|
||||
}
|
||||
|
||||
writeFileSync(
|
||||
requestParamsOutputFile,
|
||||
generateRequestTypes(opts.version, allSpec),
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
|
||||
const { fn: factory, types } = genFactory(apiOutputFolder, downloadArtifacts.locations.specFolder, namespaces)
|
||||
writeFileSync(
|
||||
mainOutputFile,
|
||||
factory,
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
|
||||
const oldTypeDefString = readFileSync(typeDefFile, 'utf8')
|
||||
const start = oldTypeDefString.indexOf('/* GENERATED */')
|
||||
const end = oldTypeDefString.indexOf('/* /GENERATED */')
|
||||
const newTypeDefString = oldTypeDefString.slice(0, start + 15) + '\n' + types + '\n ' + oldTypeDefString.slice(end)
|
||||
writeFileSync(
|
||||
typeDefFile,
|
||||
newTypeDefString,
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
|
||||
lintFiles(log, () => {
|
||||
log.text = 'Generating documentation'
|
||||
writeFileSync(
|
||||
docOutputFile,
|
||||
generateDocs(require(join(downloadArtifacts.locations.specFolder, '_common.json')), allSpec),
|
||||
{ encoding: 'utf8' }
|
||||
)
|
||||
|
||||
log.succeed('Done!')
|
||||
})
|
||||
}
|
||||
|
||||
function lintFiles (log, cb) {
|
||||
log.text = 'Linting...'
|
||||
const files = [join(packageFolder, '*.js'), join(apiOutputFolder, '*.js')]
|
||||
standard.lintFiles(files, { fix: true }, err => {
|
||||
if (err) {
|
||||
return log.fail(err.message)
|
||||
}
|
||||
cb()
|
||||
})
|
||||
}
|
||||
|
||||
function namespacify (apis) {
|
||||
return apis
|
||||
.map(api => api.slice(0, -5))
|
||||
.filter(api => api !== '_common')
|
||||
.filter(api => !api.includes('deprecated'))
|
||||
.reduce((acc, val) => {
|
||||
if (val.includes('.')) {
|
||||
val = val.split('.')
|
||||
acc[val[0]] = acc[val[0]] || []
|
||||
acc[val[0]].push(val[1])
|
||||
} else {
|
||||
acc[val] = []
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
}
|
||||
}
|
||||
@ -1,8 +0,0 @@
|
||||
#!/bin/bash

# Run Kibana in Docker on the `elastic` network, pointed at an Elasticsearch
# node reachable as http://elasticsearch:9200 inside that network.
# --rm removes the container on exit; the UI is published on port 5601.
exec docker run \
  --rm \
  -e ELASTICSEARCH_URL="http://elasticsearch:9200" \
  -p 5601:5601 \
  --network=elastic \
  docker.elastic.co/kibana/kibana:7.0.0-beta1
|
||||
@ -1,139 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { accessSync, mkdirSync } = require('fs')
|
||||
const { join } = require('path')
|
||||
const Git = require('simple-git')
|
||||
|
||||
const esRepo = 'https://github.com/elastic/elasticsearch.git'
|
||||
const esFolder = join(__dirname, '..', '..', 'elasticsearch')
|
||||
const apiFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'api')
|
||||
const xPackFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'api')
|
||||
|
||||
function cloneAndCheckout (opts, callback) {
|
||||
const { log, tag, branch } = opts
|
||||
withTag(tag, callback)
|
||||
|
||||
/**
|
||||
* Sets the elasticsearch repository to the given tag.
|
||||
* If the repository is not present in `esFolder` it will
|
||||
* clone the repository and the checkout the tag.
|
||||
* If the repository is already present but it cannot checkout to
|
||||
* the given tag, it will perform a pull and then try again.
|
||||
* @param {string} tag
|
||||
* @param {function} callback
|
||||
*/
|
||||
function withTag (tag, callback) {
|
||||
let fresh = false
|
||||
let retry = 0
|
||||
|
||||
if (!pathExist(esFolder)) {
|
||||
if (!createFolder(esFolder)) {
|
||||
log.fail('Failed folder creation')
|
||||
return
|
||||
}
|
||||
fresh = true
|
||||
}
|
||||
|
||||
const git = Git(esFolder)
|
||||
|
||||
if (fresh) {
|
||||
clone(checkout)
|
||||
} else if (opts.branch) {
|
||||
checkout(true)
|
||||
} else {
|
||||
checkout()
|
||||
}
|
||||
|
||||
function checkout (alsoPull = false) {
|
||||
if (branch) {
|
||||
log.text = `Checking out branch '${branch}'`
|
||||
} else {
|
||||
log.text = `Checking out tag '${tag}'`
|
||||
}
|
||||
git.checkout(branch || tag, err => {
|
||||
if (err) {
|
||||
if (retry++ > 0) {
|
||||
callback(new Error(`Cannot checkout tag '${tag}'`), { apiFolder, xPackFolder })
|
||||
return
|
||||
}
|
||||
return pull(checkout)
|
||||
}
|
||||
if (alsoPull) {
|
||||
return pull(checkout)
|
||||
}
|
||||
callback(null, { apiFolder, xPackFolder })
|
||||
})
|
||||
}
|
||||
|
||||
function pull (cb) {
|
||||
log.text = 'Pulling elasticsearch repository...'
|
||||
git.pull(err => {
|
||||
if (err) {
|
||||
callback(err, { apiFolder, xPackFolder })
|
||||
return
|
||||
}
|
||||
cb()
|
||||
})
|
||||
}
|
||||
|
||||
function clone (cb) {
|
||||
log.text = 'Cloning elasticsearch repository...'
|
||||
git.clone(esRepo, esFolder, err => {
|
||||
if (err) {
|
||||
callback(err, { apiFolder, xPackFolder })
|
||||
return
|
||||
}
|
||||
cb()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the given path exists
|
||||
* @param {string} path
|
||||
* @returns {boolean} true if exists, false if not
|
||||
*/
|
||||
function pathExist (path) {
|
||||
try {
|
||||
accessSync(path)
|
||||
return true
|
||||
} catch (err) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the given folder
|
||||
* @param {string} name
|
||||
* @returns {boolean} true on success, false on failure
|
||||
*/
|
||||
function createFolder (name) {
|
||||
try {
|
||||
mkdirSync(name)
|
||||
return true
|
||||
} catch (err) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = cloneAndCheckout
|
||||
@ -1,553 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
|
||||
'use strict'
|
||||
|
||||
const { join } = require('path')
|
||||
const dedent = require('dedent')
|
||||
const allowedMethods = {
|
||||
noBody: ['GET', 'HEAD', 'DELETE'],
|
||||
body: ['POST', 'PUT', 'DELETE']
|
||||
}
|
||||
|
||||
// if a parameter is depracted in a minor release
|
||||
// we should be able to support it until the next major
|
||||
const deprecatedParameters = require('./patch.json')
|
||||
|
||||
// list of apis that does not need any kind of validation
|
||||
// because of how the url is built or the `type` handling in ES7
|
||||
const noPathValidation = [
|
||||
'create',
|
||||
'exists',
|
||||
'explain',
|
||||
'get',
|
||||
'get_source',
|
||||
'index',
|
||||
'indices.get_alias',
|
||||
'indices.exists_alias',
|
||||
'indices.get_field_mapping',
|
||||
'indices.get_mapping',
|
||||
'indices.get_settings',
|
||||
'indices.put_mapping',
|
||||
'indices.stats',
|
||||
'delete',
|
||||
'nodes.info',
|
||||
'nodes.stats',
|
||||
'nodes.usage',
|
||||
'tasks.cancel',
|
||||
'termvectors',
|
||||
'update'
|
||||
]
|
||||
|
||||
function generateNamespace (namespace, nested, specFolder, version) {
|
||||
const common = require(join(specFolder, '_common.json'))
|
||||
let code = dedent`
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
/* eslint no-unused-vars: 0 */
|
||||
|
||||
const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
|
||||
`
|
||||
if (nested.length > 0) {
|
||||
let getters = ''
|
||||
for (const n of nested) {
|
||||
if (n.includes('_')) {
|
||||
const nameSnaked = n
|
||||
.replace(/\.([a-z])/g, k => k[1].toUpperCase())
|
||||
.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
getters += `${n}: { get () { return this.${nameSnaked} } },\n`
|
||||
}
|
||||
}
|
||||
const api = generateMultiApi(version, namespace, nested, common, specFolder)
|
||||
if (getters.length > 0) {
|
||||
getters = `Object.defineProperties(${api.namespace}Api.prototype, {\n${getters}})`
|
||||
}
|
||||
|
||||
code += `
|
||||
const acceptedQuerystring = ${JSON.stringify(api.acceptedQuerystring)}
|
||||
const snakeCase = ${JSON.stringify(api.snakeCase)}
|
||||
|
||||
function ${api.namespace}Api (transport, ConfigurationError) {
|
||||
this.transport = transport
|
||||
this[kConfigurationError] = ConfigurationError
|
||||
}
|
||||
|
||||
${api.code}
|
||||
|
||||
${getters}
|
||||
|
||||
module.exports = ${api.namespace}Api
|
||||
`
|
||||
} else {
|
||||
const spec = require(join(specFolder, `${namespace}.json`))
|
||||
const api = generateSingleApi(version, spec, common)
|
||||
code += `
|
||||
const acceptedQuerystring = ${JSON.stringify(api.acceptedQuerystring)}
|
||||
const snakeCase = ${JSON.stringify(api.snakeCase)}
|
||||
|
||||
${api.code}
|
||||
|
||||
module.exports = ${api.name}Api
|
||||
`
|
||||
}
|
||||
return code
|
||||
}
|
||||
|
||||
function generateMultiApi (version, namespace, nested, common, specFolder) {
|
||||
const namespaceSnaked = namespace
|
||||
.replace(/\.([a-z])/g, k => k[1].toUpperCase())
|
||||
.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
let code = ''
|
||||
const snakeCase = {}
|
||||
const acceptedQuerystring = []
|
||||
for (const n of nested) {
|
||||
const nameSnaked = n
|
||||
.replace(/\.([a-z])/g, k => k[1].toUpperCase())
|
||||
.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
const spec = require(join(specFolder, `${namespace}.${n}.json`))
|
||||
const api = generateSingleApi(version, spec, common)
|
||||
code += `${Uppercase(namespaceSnaked)}Api.prototype.${nameSnaked} = ${api.code}\n\n`
|
||||
Object.assign(snakeCase, api.snakeCase)
|
||||
for (const q of api.acceptedQuerystring) {
|
||||
if (!acceptedQuerystring.includes(q)) {
|
||||
acceptedQuerystring.push(q)
|
||||
}
|
||||
}
|
||||
}
|
||||
return { code, snakeCase, acceptedQuerystring, namespace: Uppercase(namespaceSnaked) }
|
||||
}
|
||||
|
||||
function generateSingleApi (version, spec, common) {
|
||||
const release = version.charAt(0)
|
||||
const api = Object.keys(spec)[0]
|
||||
const name = api
|
||||
.replace(/\.([a-z])/g, k => k[1].toUpperCase())
|
||||
.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
|
||||
const { paths } = spec[api].url
|
||||
const { params } = spec[api]
|
||||
const acceptedQuerystring = []
|
||||
const required = []
|
||||
|
||||
const methods = paths.reduce((acc, val) => {
|
||||
for (const method of val.methods) {
|
||||
if (!acc.includes(method)) acc.push(method)
|
||||
}
|
||||
return acc
|
||||
}, [])
|
||||
const parts = paths.reduce((acc, val) => {
|
||||
if (!val.parts) return acc
|
||||
for (const part of Object.keys(val.parts)) {
|
||||
if (!acc.includes(part)) acc.push(part)
|
||||
}
|
||||
return acc
|
||||
}, [])
|
||||
|
||||
// get the required parts from the url
|
||||
// if the url has at least one static path,
|
||||
// then there are not required parts of the url
|
||||
let allParts = []
|
||||
for (const path of paths) {
|
||||
if (path.parts) {
|
||||
allParts.push(Object.keys(path.parts))
|
||||
} else {
|
||||
allParts = []
|
||||
break
|
||||
}
|
||||
}
|
||||
if (allParts.length > 0) {
|
||||
intersect(...allParts).forEach(r => required.push(r))
|
||||
}
|
||||
|
||||
for (const key in params) {
|
||||
if (params[key].required) {
|
||||
required.push(key)
|
||||
}
|
||||
|
||||
acceptedQuerystring.push(key)
|
||||
if (deprecatedParameters[release] && deprecatedParameters[release][key]) {
|
||||
acceptedQuerystring.push(deprecatedParameters[release][key])
|
||||
}
|
||||
}
|
||||
|
||||
for (const key in spec[api]) {
|
||||
const k = spec[api][key]
|
||||
if (k && k.required) {
|
||||
required.push(key)
|
||||
}
|
||||
}
|
||||
if (common && common.params) {
|
||||
for (const key in common.params) {
|
||||
acceptedQuerystring.push(key)
|
||||
}
|
||||
}
|
||||
|
||||
const code = `
|
||||
function ${name}Api (params, options, callback) {
|
||||
;[params, options, callback] = normalizeArguments(params, options, callback)
|
||||
|
||||
${genRequiredChecks()}
|
||||
|
||||
${genUrlValidation(paths, api)}
|
||||
|
||||
let { ${genQueryDenylist(false)}, ...querystring } = params
|
||||
querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
|
||||
|
||||
let path = ''
|
||||
${buildPath()}
|
||||
|
||||
// build request object
|
||||
const request = {
|
||||
method,
|
||||
path,
|
||||
${genBody(api, methods, spec[api].body, spec)}
|
||||
querystring
|
||||
}
|
||||
|
||||
return this.transport.request(request, options, callback)
|
||||
}
|
||||
`.trim() // always call trim to avoid newlines
|
||||
|
||||
return {
|
||||
name,
|
||||
code,
|
||||
acceptedQuerystring: acceptedQuerystring,
|
||||
snakeCase: genSnakeCaseMap(),
|
||||
documentation: generateDocumentation(spec[api], api)
|
||||
}
|
||||
|
||||
function genRequiredChecks () {
|
||||
const code = required
|
||||
.map(_genRequiredCheck)
|
||||
.concat(_noBody())
|
||||
.filter(Boolean)
|
||||
|
||||
if (code.length) {
|
||||
code.unshift('// check required parameters')
|
||||
}
|
||||
|
||||
return code.join('\n ')
|
||||
|
||||
function _genRequiredCheck (param) {
|
||||
const camelCased = param[0] === '_'
|
||||
? '_' + param.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
: param.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
|
||||
if (param === camelCased) {
|
||||
const check = `
|
||||
if (params['${param}'] == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: ${param}')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
`
|
||||
return check.trim()
|
||||
} else {
|
||||
const check = `
|
||||
if (params['${param}'] == null && params['${camelCased}'] == null) {
|
||||
const err = new this[kConfigurationError]('Missing required parameter: ${param} or ${camelCased}')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
`
|
||||
return check.trim()
|
||||
}
|
||||
}
|
||||
|
||||
function _noBody () {
|
||||
const check = `
|
||||
if (params.body != null) {
|
||||
const err = new this[kConfigurationError]('This API does not require a body')
|
||||
return handleError(err, callback)
|
||||
}
|
||||
`
|
||||
return spec[api].body === null ? check.trim() : ''
|
||||
}
|
||||
}
|
||||
|
||||
function genSnakeCaseMap () {
|
||||
const toCamelCase = str => {
|
||||
return str[0] === '_'
|
||||
? '_' + str.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
: str.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
}
|
||||
|
||||
return acceptedQuerystring.reduce((acc, val, index) => {
|
||||
if (toCamelCase(val) !== val) {
|
||||
acc[toCamelCase(val)] = val
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
}
|
||||
|
||||
function genQueryDenylist (addQuotes = true) {
|
||||
const toCamelCase = str => {
|
||||
return str[0] === '_'
|
||||
? '_' + str.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
: str.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
}
|
||||
|
||||
const denylist = ['method', 'body']
|
||||
parts.forEach(p => {
|
||||
const camelStr = toCamelCase(p)
|
||||
if (camelStr !== p) denylist.push(`${camelStr}`)
|
||||
denylist.push(`${p}`)
|
||||
})
|
||||
return addQuotes ? denylist.map(q => `'${q}'`) : denylist
|
||||
}
|
||||
|
||||
function buildPath () {
|
||||
const toCamelCase = str => {
|
||||
return str[0] === '_'
|
||||
? '_' + str.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
: str.replace(/_([a-z])/g, k => k[1].toUpperCase())
|
||||
}
|
||||
|
||||
const genAccessKey = str => {
|
||||
const camelStr = toCamelCase(str)
|
||||
return camelStr === str
|
||||
? str
|
||||
: `${str} || ${camelStr}`
|
||||
}
|
||||
|
||||
const genCheck = path => {
|
||||
return path
|
||||
.split('/')
|
||||
.filter(Boolean)
|
||||
.map(p => p.startsWith('{') ? `(${genAccessKey(p.slice(1, -1))}) != null` : false)
|
||||
.filter(Boolean)
|
||||
.join(' && ')
|
||||
}
|
||||
|
||||
const genPath = path => {
|
||||
path = path
|
||||
.split('/')
|
||||
.filter(Boolean)
|
||||
.map(p => p.startsWith('{') ? `encodeURIComponent(${genAccessKey(p.slice(1, -1))})` : `'${p}'`)
|
||||
.join(' + \'/\' + ')
|
||||
return path.length > 0 ? ('\'/\' + ' + path) : '\'/\''
|
||||
}
|
||||
|
||||
let hasStaticPath = false
|
||||
let sortedPaths = paths
|
||||
// some legacy API have mutliple statis paths
|
||||
// this filter removes them
|
||||
.filter(p => {
|
||||
if (p.path.includes('{')) return true
|
||||
if (hasStaticPath === false && p.deprecated == null) {
|
||||
hasStaticPath = true
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
// sort by number of parameters (desc)
|
||||
.sort((a, b) => Object.keys(b.parts || {}).length - Object.keys(a.parts || {}).length)
|
||||
|
||||
const allDeprecated = paths.filter(path => path.deprecated != null)
|
||||
if (allDeprecated.length === paths.length) sortedPaths = [paths[0]]
|
||||
|
||||
let code = ''
|
||||
for (let i = 0; i < sortedPaths.length; i++) {
|
||||
const { path, methods } = sortedPaths[i]
|
||||
if (sortedPaths.length === 1) {
|
||||
code += `if (method == null) method = ${generatePickMethod(methods)}
|
||||
path = ${genPath(path)}
|
||||
`
|
||||
} else if (i === 0) {
|
||||
code += `if (${genCheck(path)}) {
|
||||
if (method == null) method = ${generatePickMethod(methods)}
|
||||
path = ${genPath(path)}
|
||||
}
|
||||
`
|
||||
} else if (i === sortedPaths.length - 1) {
|
||||
code += ` else {
|
||||
if (method == null) method = ${generatePickMethod(methods)}
|
||||
path = ${genPath(path)}
|
||||
}
|
||||
`
|
||||
} else {
|
||||
code += ` else if (${genCheck(path)}) {
|
||||
if (method == null) method = ${generatePickMethod(methods)}
|
||||
path = ${genPath(path)}
|
||||
}
|
||||
`
|
||||
}
|
||||
}
|
||||
|
||||
return code
|
||||
}
|
||||
}
|
||||
|
||||
// Builds the JS expression (as a string) that picks the HTTP method for a
// generated API call: a single literal when only one method exists,
// otherwise a ternary that selects the body-capable method only when a
// request body is present.
function generatePickMethod (methods) {
  if (methods.length === 1) {
    return `'${methods[0]}'`
  }
  const bodyMethod = getBodyMethod(methods)
  const noBodyMethod = getNoBodyMethod(methods)
  if (bodyMethod && noBodyMethod) {
    return `body == null ? '${noBodyMethod}' : '${bodyMethod}'`
  } else if (bodyMethod) {
    return `'${bodyMethod}'`
  } else {
    return `'${noBodyMethod}'`
  }
}
|
||||
|
||||
// Builds the `body` (or `bulkBody`) line of the generated request object:
// - ndjson content types use `bulkBody`
// - APIs with a body-capable method but a null body spec send an empty string
// - otherwise the user-supplied body (defaulting to '') or an explicit null
function genBody (api, methods, body, spec) {
  const bodyMethod = getBodyMethod(methods)
  const { content_type } = spec[api].headers
  if (content_type && content_type.includes('application/x-ndjson')) {
    return 'bulkBody: body,'
  }
  if (body === null && bodyMethod) {
    return 'body: \'\','
  } else if (bodyMethod) {
    return 'body: body || \'\','
  } else {
    return 'body: null,'
  }
}
|
||||
|
||||
// Returns the first method in `methods` that may carry a request body
// (per `allowedMethods.body`), or null when none may.
// Fixes: parameter shadowing (`m` used for both array item and result) and
// the obscure `~indexOf` bitwise idiom, replaced with `includes`.
function getBodyMethod (methods) {
  const matches = methods.filter(method => allowedMethods.body.includes(method))
  if (matches.length) return matches[0]
  return null
}
|
||||
|
||||
/**
 * Returns the first verb of `methods` that must NOT carry a request body
 * (per the module-level `allowedMethods.noBody` list), or null.
 */
function getNoBodyMethod (methods) {
  for (const verb of methods) {
    if (allowedMethods.noBody.indexOf(verb) !== -1) return verb
  }
  return null
}
|
||||
|
||||
/**
 * Emits the JS snippet that validates dependent dynamic url components for
 * the api being generated. Components are taken from the path with the most
 * dynamic parts and processed right-to-left: each component requires every
 * component that appears to its right in the url (e.g. `{index}/{type}/{id}`
 * means `id` requires `type` and `index`).
 * Returns '' for apis exempted via `noPathValidation`.
 */
function genUrlValidation (paths, api) {
  // this api does not need url validation
  if (!needsPathValidation(api)) return ''
  // gets only the dynamic components of the url in an array
  // then we reverse it. A parameters always require what is
  // at its right in the array.
  const chunks = paths
    // sort paths so the one with the most dynamic parts comes first…
    .sort((a, b) => Object.keys(a.parts || {}).length > Object.keys(b.parts || {}).length ? -1 : 1)
    // …and keep only that one
    .slice(0, 1)
    .reduce((acc, val) => val.path, '')
    // .reduce((a, b) => a.path.split('/').length > b.path.split('/').length ? a.path : b.path)
    .split('/')
    .filter(s => s.startsWith('{'))
    .map(s => s.slice(1, -1))
    .reverse()

  let code = ''

  const len = chunks.length
  chunks.forEach((chunk, index) => {
    // the last chunk (leftmost url component) depends on nothing
    if (index === len - 1) return
    const params = []
    // url parts can be declared in camelCase fashion
    let camelCased = chunk[0] === '_'
      ? '_' + chunk.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
      : chunk.replace(/_([a-z])/g, k => k[1].toUpperCase())

    if (chunk === camelCased) {
      code += `${index ? '} else ' : ''}if (params['${chunk}'] != null && (`
    } else {
      code += `${index ? '} else ' : ''}if ((params['${chunk}'] != null || params['${camelCased}'] != null) && (`
    }
    for (let i = index + 1; i < len; i++) {
      params.push(chunks[i])
      // url parts can be declared in camelCase fashion
      camelCased = chunks[i][0] === '_'
        ? '_' + chunks[i].slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
        : chunks[i].replace(/_([a-z])/g, k => k[1].toUpperCase())

      if (chunks[i] === camelCased) {
        code += `params['${chunks[i]}'] == null${i === len - 1 ? '' : ' || '}`
      } else {
        code += `(params['${chunks[i]}'] == null && params['${camelCased}'] == null)${i === len - 1 ? '' : ' || '}`
      }
    }
    // NOTE(review): the emitted snippet assumes `handleError`, `callback` and
    // `kConfigurationError` are in scope at its insertion point — confirm in
    // the method template that consumes this string.
    code += `)) {
      const err = new this[kConfigurationError]('Missing required parameter of the url: ${params.join(', ')}')
      return handleError(err, callback)
    `
  })

  if (chunks.length > 1) {
    code += '\n}'
  }

  if (code.length) {
    code = '// check required url components\n' + code
  }

  return code.trim()
}
|
||||
|
||||
/**
 * Renders the JSDoc comment for a generated api function: a "Perform a X
 * request" line plus, when present, the spec description and doc url.
 * Returns '' when the spec carries no documentation at all.
 */
function generateDocumentation ({ documentation }, op) {
  if (documentation == null) return ''

  const lines = [`Perform a ${op} request`]
  // some upstream description fields contain no-break spaces (\u00A0);
  // normalize them to plain spaces
  if (documentation.description) {
    lines.push(documentation.description.replace(/\u00A0/g, ' '))
  }
  if (documentation.url) {
    lines.push(documentation.url)
  }
  return '/**\n' + lines.map(line => ` * ${line}\n`).join('') + ' */'
}
|
||||
|
||||
/**
 * True unless the api opted out of url-component validation via the
 * module-level `noPathValidation` list.
 */
function needsPathValidation (api) {
  return !noPathValidation.includes(api)
}
|
||||
|
||||
/**
 * Returns the elements of `first` that appear in every array of `rest`
 * (order preserved from `first`). With no `rest` arrays, returns `first`.
 */
function intersect (first, ...rest) {
  let result = first
  for (const other of rest) {
    result = result.filter(item => other.indexOf(item) !== -1)
  }
  return result
}
|
||||
|
||||
/**
 * Uppercases the first character of `str`, leaving the rest untouched.
 * Robustness fix: the previous body threw a TypeError on '' because
 * `str[0]` is undefined; an empty string is now returned unchanged.
 */
function Uppercase (str) {
  if (str.length === 0) return str
  return str[0].toUpperCase() + str.slice(1)
}
|
||||
|
||||
// single entry point of this module: the namespace/type generator
module.exports = generateNamespace
|
||||
@ -1,318 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const { readdirSync } = require('fs')
|
||||
const { join } = require('path')
|
||||
const dedent = require('dedent')
|
||||
|
||||
// Names of apis that ship a code-example page under docs/examples.
// `slice(0, -9)` strips a fixed 9-char file extension — presumably
// '.asciidoc'; TODO confirm against the docs/examples folder. The shared
// 'index' page is not an api and is filtered out.
const codeExamples = readdirSync(join(__dirname, '..', '..', 'docs', 'examples'))
  .map(file => file.slice(0, -9))
  .filter(api => api !== 'index')
|
||||
|
||||
/**
 * Renders the full asciidoc API reference: a "do not edit" banner (hidden
 * inside an asciidoc comment block `////////`), an intro with usage
 * examples, the table of common querystring parameters, then one section
 * per endpoint spec.
 *
 * @param {Object} common - the _common.json spec (shared parameters)
 * @param {Object[]} spec - array of per-endpoint specs
 * @returns {string} the complete asciidoc document
 */
function generateDocs (common, spec) {
  // NOTE(review): this template's whitespace was mangled by extraction;
  // the banner box alignment below is best-effort — confirm against the
  // generated docs before relying on exact spacing.
  let doc = dedent`
  [[api-reference]]

  ////////



  ===========================================================================================================================
  ||                                                                                                                       ||
  ||                                                                                                                       ||
  ||                                                                                                                       ||
  ||       ██████╗ ███████╗ █████╗ ██████╗ ███╗   ███╗███████╗                                                             ||
  ||       ██╔══██╗██╔════╝██╔══██╗██╔══██╗████╗ ████║██╔════╝                                                             ||
  ||       ██████╔╝█████╗  ███████║██║  ██║██╔████╔██║█████╗                                                               ||
  ||       ██╔══██╗██╔══╝  ██╔══██║██║  ██║██║╚██╔╝██║██╔══╝                                                               ||
  ||       ██║  ██║███████╗██║  ██║██████╔╝██║ ╚═╝ ██║███████╗                                                             ||
  ||       ╚═╝  ╚═╝╚══════╝╚═╝  ╚═╝╚═════╝ ╚═╝     ╚═╝╚══════╝                                                             ||
  ||                                                                                                                       ||
  ||                                                                                                                       ||
  ||      This file is autogenerated, DO NOT send pull requests that changes this file directly.                           ||
  ||      You should update the script that does the generation, which can be found in '/scripts/utils/generateDocs.js'.   ||
  ||                                                                                                                       ||
  ||      You can run the script with the following command:                                                               ||
  ||         node scripts/generate --branch <branch_name>                                                                  ||
  ||      or                                                                                                               ||
  ||         node scripts/generate --tag <tag_name>                                                                        ||
  ||                                                                                                                       ||
  ||                                                                                                                       ||
  ||                                                                                                                       ||
  ===========================================================================================================================



  ////////

  == API Reference

  This document contains the entire list of the Elasticsearch API supported by the client, both OSS and commercial. The client is entirely licensed under Apache 2.0.

  Elasticsearch exposes an HTTP layer to communicate with, and the client is a library that will help you do this. Because of this reason, you will see HTTP related parameters, such as ${'`'}body${'`'} or ${'`'}headers${'`'}.

  Every API can accept two objects, the first contains all the parameters that will be sent to Elasticsearch, while the second includes the request specific parameters, such as timeouts, headers, and so on.
  In the first object, every parameter but the body will be sent via querystring or url parameter, depending on the API, and every unrecognized parameter will be sent as querystring.

  [source,js]
  ----
  // promise API
  const result = await client.search({
    index: 'my-index',
    from: 20,
    size: 10,
    body: { foo: 'bar' }
  }, {
    ignore: [404],
    maxRetries: 3
  })

  // callback API
  client.search({
    index: 'my-index',
    from: 20,
    size: 10,
    body: { foo: 'bar' }
  }, {
    ignore: [404],
    maxRetries: 3
  }, (err, result) => {
    if (err) console.log(err)
  })
  ----

  In this document, you will find the reference of every parameter accepted by the querystring or the url. If you also need to send the body, you can find the documentation of its format in the reference link that is present along with every endpoint.

  \n\n`
  // shared querystring parameters first, then one section per endpoint
  doc += commonParameters(common)
  spec.forEach(s => {
    doc += '\n' + generateApiDoc(s)
  })
  return doc
}
|
||||
|
||||
/**
 * Renders the "Common parameters" asciidoc table from the _common.json
 * spec. Snake_cased parameters are documented alongside their camelCase
 * alias (e.g. `error_trace` or `errorTrace`).
 */
function commonParameters (spec) {
  let doc = dedent`
  [discrete]
  === Common parameters
  Parameters that are accepted by all API endpoints.

  link:{ref}/common-options.html[Documentation]
  [cols=2*]
  |===\n`
  Object.keys(spec.params).forEach(key => {
    // show both spellings only when they actually differ
    const name = isSnakeCased(key) && key !== camelify(key)
      ? '`' + key + '` or `' + camelify(key) + '`'
      : '`' + key + '`'

    doc += dedent`
    |${name}
    |${'`' + spec.params[key].type + '`'} - ${spec.params[key].description}`
    if (spec.params[key].default) {
      doc += ` +
  _Default:_ ${'`' + spec.params[key].default + '`'}`
    }
    doc += '\n\n'
  })

  doc += dedent`
  |===
  `
  return doc
}
|
||||
|
||||
/**
 * Renders the asciidoc section for one endpoint spec: heading, stability
 * banner, a TypeScript call example listing every accepted parameter,
 * documentation / code-example links, and a 2-column parameter table.
 * Parameters are collected in priority order: url parts, then querystring
 * params (skipping duplicates), then the body.
 */
function generateApiDoc (spec) {
  // each spec object has exactly one top-level key: the api name
  const name = Object.keys(spec)[0]
  const documentationUrl = spec[name].documentation && spec[name].documentation.url
    ? fixLink(name, spec[name].documentation.url)
    : ''
  const params = []
  // url params
  // merge the `parts` of every declared path, first occurrence wins
  const urlParts = spec[name].url.paths.reduce((acc, path) => {
    if (!path.parts) return acc
    for (const part in path.parts) {
      if (acc[part] != null) continue
      acc[part] = path.parts[part]
    }
    return acc
  }, {})
  if (urlParts) {
    Object.keys(urlParts).forEach(param => {
      params.push({
        name: param,
        type: getType(urlParts[param].type, urlParts[param].options),
        description: urlParts[param].description,
        default: urlParts[param].default,
        deprecated: !!urlParts[param].deprecated
      })
    })
  }

  // query params
  const urlParams = spec[name].params
  if (urlParams) {
    Object.keys(urlParams).forEach(param => {
      // a querystring param with the same name as a url part is documented once
      const duplicate = params.find(ele => ele.name === param)
      if (duplicate) return
      params.push({
        name: param,
        type: getType(urlParams[param].type, urlParams[param].options),
        description: urlParams[param].description,
        default: urlParams[param].default,
        deprecated: !!urlParams[param].deprecated
      })
    })
  }

  // body params
  const body = spec[name].body
  if (body) {
    params.push({
      name: 'body',
      type: 'object',
      description: body.description,
      default: body.default,
      deprecated: !!body.deprecated
    })
  }

  // one `name: type,` line per parameter for the call example
  const codeParameters = params
    .reduce((acc, val) => {
      const code = `${val.name}: ${val.type},`
      acc += acc === ''
        ? code
        : '\n  ' + code

      return acc
    }, '')
    // remove last comma
    .slice(0, -1)

  const stability = spec[name].stability === 'stable'
    ? ''
    : `*Stability:* ${spec[name].stability}`

  let doc = dedent`
  [discrete]
  === ${camelify(name)}
  ${stability}
  [source,ts]
  ----
  client.${camelify(name)}(${codeParameters.length > 0 ? `{\n  ${codeParameters}\n}` : ''})
  ----\n`
  if (documentationUrl) {
    doc += `link:${documentationUrl}[Documentation] +\n`
  }
  if (codeExamples.includes(name)) {
    doc += `{jsclient}/${name.replace(/\./g, '_')}_examples.html[Code Example] +\n`
  }

  if (params.length !== 0) {
    doc += dedent`[cols=2*]
    |===\n`
    doc += params.reduce((acc, val) => {
      // note: shadows the outer `name` (api name) with the parameter name
      const name = isSnakeCased(val.name) && val.name !== camelify(val.name)
        ? '`' + val.name + '` or `' + camelify(val.name) + '`'
        : '`' + val.name + '`'
      acc += dedent`
      |${name}
      |${'`' + val.type.replace(/\|/g, '\\|') + '`'} - ${val.description}`
      if (val.default) {
        acc += ` +\n_Default:_ ${'`' + val.default + '`'}`
      }
      if (val.deprecated) {
        acc += ' +\n\nWARNING: This parameter has been deprecated.'
      }
      return acc + '\n\n'
    }, '')

    doc += dedent`
    |===
    `
  }
  doc += '\n'
  return doc
}
|
||||
|
||||
// Hand-maintained replacements for endpoints whose spec `documentation.url`
// points at a missing or renamed page. Keys are api names with any `xpack.`
// prefix already stripped (see fixLink); values use the asciidoc {ref}
// attribute expanded at doc-build time.
const LINK_OVERRIDES = {
  'license.delete': '{ref}/delete-license.html',
  'license.get': '{ref}/get-license.html',
  'license.get_basic_status': '{ref}/get-basic-status.html',
  'license.get_trial_status': '{ref}/get-trial-status.html',
  'license.post': '{ref}/update-license.html',
  'license.post_start_basic': '{ref}/start-basic.html',
  'license.post_start_trial': '{ref}/start-trial.html',
  'migration.deprecations': '{ref}/migration-api-deprecation.html',
  'monitoring.bulk': '{ref}/monitor-elasticsearch-cluster.html',
  'ingest.delete_pipeline': '{ref}/delete-pipeline-api.html',
  'ingest.get_pipeline': '{ref}/get-pipeline-api.html',
  'ingest.put_pipeline': '{ref}/put-pipeline-api.html',
  'ingest.simulate': '{ref}/simulate-pipeline-api.html',
  'ingest.processor_grok': '{ref}/grok-processor.html#grok-processor-rest-get'
}
|
||||
// Fixes bad urls in the JSON spec
function fixLink (name, str) {
  /* In 6.x some API start with `xpack.` when in master they do not. We
   * can safely ignore that for link generation. */
  const key = name.replace(/^xpack\./, '')
  if (LINK_OVERRIDES[key]) return LINK_OVERRIDES[key]
  if (!str) return ''
  /* Replace references to the guide with the attribute {ref} because
   * the json files in the Elasticsearch repo are a bit of a mess. */
  const rewrites = [
    [/^.+guide\/en\/elasticsearch\/reference\/[^/]+\/([^./]*\.html(?:#.+)?)$/, '{ref}/$1'],
    [/frozen\.html/, 'freeze-index-api.html'],
    [/ml-file-structure\.html/, 'ml-find-file-structure.html'],
    [/security-api-get-user-privileges\.html/, 'security-api-get-privileges.html']
  ]
  let out = str
  for (const [pattern, replacement] of rewrites) {
    out = out.replace(pattern, replacement)
  }
  return out
}
|
||||
|
||||
/**
 * Maps a REST-spec parameter type to its TypeScript rendering for the
 * docs. `enum` types become a union of quoted literals built from
 * `options`; unknown types pass through unchanged.
 */
function getType (type, options) {
  if (type === 'enum') return options.map(k => `'${k}'`).join(' | ')
  if (type === 'list') return 'string | string[]'
  if (['date', 'time', 'timeout'].includes(type)) return 'string'
  if (['int', 'double', 'long'].includes(type)) return 'number'
  return type
}
|
||||
|
||||
/**
 * snake_case -> camelCase, preserving a single leading underscore
 * (e.g. '_source_includes' -> '_sourceIncludes').
 */
function camelify (str) {
  const toCamel = s => s.replace(/_([a-z])/g, m => m[1].toUpperCase())
  return str.startsWith('_') ? '_' + toCamel(str.slice(1)) : toCamel(str)
}
|
||||
|
||||
// true when the name contains at least one underscore
function isSnakeCased (str) {
  return str.includes('_')
}
|
||||
|
||||
// single entry point of this module: the asciidoc reference generator
module.exports = generateDocs
|
||||
@ -1,299 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint-disable no-template-curly-in-string */
|
||||
/* eslint camelcase: 0 */
|
||||
|
||||
'use strict'
|
||||
|
||||
const { readdirSync } = require('fs')
|
||||
const { join } = require('path')
|
||||
const dedent = require('dedent')
|
||||
const deepmerge = require('deepmerge')
|
||||
|
||||
/**
 * Generates the api/index.js source for the legacy-style client: wires every
 * generated api module onto the ESAPI prototype (lazy getters for namespaced
 * apis, direct assignment otherwise) and builds the TypeScript definition
 * fragments for both the regular and the Kibana client flavours.
 *
 * @param {string} folder - generated api folder; currently unused (only
 *   referenced by the commented-out readdirSync), kept for call compatibility
 * @param {string} specFolder - folder holding the REST api spec json files
 * @param {Object} namespaces - map of namespace name -> array of child apis
 *   (empty array = top-level api)
 * @returns {{ fn: string, types: string, kibanaTypes: string }}
 */
function genFactory (folder, specFolder, namespaces) {
  // get all the API files
  // const apiFiles = readdirSync(folder)
  const apiFiles = readdirSync(specFolder)
    .filter(file => file !== '_common.json')
    .filter(file => !file.includes('deprecated'))
    .sort()
  // Build a nested object of method signatures for the regular client.
  // 'a.b.json' is split on '.' and folded right-to-left, so the innermost
  // level (acc === null) gets the method overloads and outer levels nest it
  // under the namespace key (plus a camelCase alias for snake_cased names).
  const types = apiFiles
    .map(file => {
      const name = file
        .slice(0, -5)
        .replace(/\.([a-z])/g, k => k[1].toUpperCase())
        .replace(/_([a-z])/g, k => k[1].toUpperCase())

      return file
        .slice(0, -5) // remove `.json` extension
        .split('.')
        .reverse()
        .reduce((acc, val) => {
          const spec = readSpec(specFolder, file.slice(0, -5))
          const isHead = isHeadMethod(spec, file.slice(0, -5))
          const body = hasBody(spec, file.slice(0, -5))
          const methods = acc === null ? buildMethodDefinition({ kibana: false }, val, name, body, isHead, spec) : null
          const obj = {}
          if (methods) {
            for (const m of methods) {
              obj[m.key] = m.val
            }
          } else {
            obj[val] = acc
            if (isSnakeCased(val)) {
              obj[camelify(val)] = acc
            }
          }
          return obj
        }, null)
    })
    .reduce((acc, val) => deepmerge(acc, val), {})

  // Same fold for the Kibana flavour: camelCase-only keys and no
  // callback/snake_case overloads (see buildMethodDefinition).
  const kibanaTypes = apiFiles
    .map(file => {
      const name = file
        .slice(0, -5)
        .replace(/\.([a-z])/g, k => k[1].toUpperCase())
        .replace(/_([a-z])/g, k => k[1].toUpperCase())

      return file
        .slice(0, -5) // remove `.json` extension
        .split('.')
        .reverse()
        .reduce((acc, val) => {
          const spec = readSpec(specFolder, file.slice(0, -5))
          const isHead = isHeadMethod(spec, file.slice(0, -5))
          const body = hasBody(spec, file.slice(0, -5))
          const methods = acc === null ? buildMethodDefinition({ kibana: true }, val, name, body, isHead, spec) : null
          const obj = {}
          if (methods) {
            for (const m of methods) {
              obj[m.key] = m.val
            }
          } else {
            obj[camelify(val)] = acc
          }
          return obj
        }, null)
    })
    .reduce((acc, val) => deepmerge(acc, val), {})

  // serialize the type object
  // (JSON.stringify then strip quotes/trailing commas so the output is a
  // valid TypeScript interface body rather than JSON)
  const typesStr = Object.keys(types)
    .map(key => {
      const line = `  ${key}: ${JSON.stringify(types[key], null, 4)}`
      if (line.slice(-1) === '}') {
        return line.slice(0, -1) + '  }'
      }
      return line
    })
    .join('\n')
    // remove useless quotes and commas
    .replace(/"/g, '')
    .replace(/,$/gm, '')
  const kibanaTypesStr = Object.keys(kibanaTypes)
    .map(key => {
      const line = `  ${key}: ${JSON.stringify(kibanaTypes[key], null, 4)}`
      if (line.slice(-1) === '}') {
        return line.slice(0, -1) + '  }'
      }
      return line
    })
    .join('\n')
    // remove useless quotes and commas
    .replace(/"/g, '')
    .replace(/,$/gm, '')

  let apisStr = ''
  const getters = []
  for (const namespace in namespaces) {
    if (namespaces[namespace].length > 0) {
      // namespaced apis are lazily instantiated behind a Symbol-keyed cache
      getters.push(`${camelify(namespace)}: {
      get () {
        if (this[k${toPascalCase(camelify(namespace))}] === null) {
          this[k${toPascalCase(camelify(namespace))}] = new ${toPascalCase(camelify(namespace))}Api(this.transport, this[kConfigurationError])
        }
        return this[k${toPascalCase(camelify(namespace))}]
      }
    },\n`)
      if (namespace.includes('_')) {
        // snake_case alias delegating to the camelCase getter
        getters.push(`${namespace}: { get () { return this.${camelify(namespace)} } },\n`)
      }
    } else {
      // top-level api: plain prototype assignment
      apisStr += `ESAPI.prototype.${camelify(namespace)} = ${camelify(namespace)}Api\n`
      if (namespace.includes('_')) {
        getters.push(`${namespace}: { get () { return this.${camelify(namespace)} } },\n`)
      }
    }
  }

  apisStr += '\nObject.defineProperties(ESAPI.prototype, {\n'
  for (const getter of getters) {
    apisStr += getter
  }
  apisStr += '})'

  let modules = ''
  let symbols = ''
  let symbolsInstance = ''
  for (const namespace in namespaces) {
    if (namespaces[namespace].length > 0) {
      modules += `const ${toPascalCase(camelify(namespace))}Api = require('./api/${namespace}')\n`
      symbols += `const k${toPascalCase(camelify(namespace))} = Symbol('${toPascalCase(camelify(namespace))}')\n`
      symbolsInstance += `this[k${toPascalCase(camelify(namespace))}] = null\n`
    } else {
      modules += `const ${camelify(namespace)}Api = require('./api/${namespace}')\n`
    }
  }

  const fn = dedent`
  /*
   * Licensed to Elasticsearch B.V. under one or more contributor
   * license agreements. See the NOTICE file distributed with
   * this work for additional information regarding copyright
   * ownership. Elasticsearch B.V. licenses this file to you under
   * the Apache License, Version 2.0 (the "License"); you may
   * not use this file except in compliance with the License.
   * You may obtain a copy of the License at
   *
   *    http://www.apache.org/licenses/LICENSE-2.0
   *
   * Unless required by applicable law or agreed to in writing,
   * software distributed under the License is distributed on an
   * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
   * KIND, either express or implied.  See the License for the
   * specific language governing permissions and limitations
   * under the License.
   */

  'use strict'

  ${modules}

  const { kConfigurationError } = require('./utils')
  ${symbols}

  function ESAPI (opts) {
    this[kConfigurationError] = opts.ConfigurationError
    ${symbolsInstance}
  }

  ${apisStr}

  module.exports = ESAPI
  `

  // new line at the end of file
  return { fn: fn + '\n', types: typesStr, kibanaTypes: kibanaTypesStr }
}
|
||||
|
||||
// from snake_case to camelCase
function camelify (str) {
  return str.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase())
}
|
||||
|
||||
// true when the name contains at least one underscore
function isSnakeCased (str) {
  return str.includes('_')
}
|
||||
|
||||
/**
 * Uppercases the first character of `str` (camelCase -> PascalCase).
 * Robustness fix: the previous body threw a TypeError on '' because
 * `str[0]` is undefined; an empty string is now returned unchanged.
 */
function toPascalCase (str) {
  if (str.length === 0) return str
  return str[0].toUpperCase() + str.slice(1)
}
|
||||
|
||||
/**
 * Builds the TypeScript overload entries ({ key, val } pairs, later folded
 * into an interface body) for one api.
 * - Kibana flavour: promise-only, camelCase name, no aliases.
 * - Regular flavour: promise + three callback overloads, duplicated under
 *   the camelCase alias for snake_cased apis.
 * ndjson endpoints type the body as RequestNDBody / Record<string, any>[];
 * HEAD-only endpoints (see isHeadMethod) respond with a boolean.
 *
 * NOTE(review): the `hasBody` parameter shadows the module-level hasBody()
 * helper inside this function — intentional here, but easy to trip over.
 */
function buildMethodDefinition (opts, api, name, hasBody, isHead, spec) {
  const Name = toPascalCase(name)
  const { content_type } = spec[Object.keys(spec)[0]].headers
  const bodyType = content_type && content_type.includes('application/x-ndjson') ? 'RequestNDBody' : 'RequestBody'
  const responseType = isHead ? 'boolean' : 'Record<string, any>'
  const defaultBodyType = content_type && content_type.includes('application/x-ndjson') ? 'Record<string, any>[]' : 'Record<string, any>'

  if (opts.kibana) {
    if (hasBody) {
      return [
        { key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' }
      ]
    } else {
      return [
        { key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' }
      ]
    }
  }

  if (hasBody) {
    let methods = [
      { key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' },
      { key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
      { key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
      { key: `${api}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' }
    ]
    if (isSnakeCased(api)) {
      methods = methods.concat([
        { key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params?: RequestParams.${Name}<TRequestBody>, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' },
        { key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
        { key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
        { key: `${camelify(api)}<TResponse = ${responseType}, TRequestBody extends ${bodyType} = ${defaultBodyType}, TContext = Context>(params: RequestParams.${Name}<TRequestBody>, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' }
      ])
    }
    return methods
  } else {
    let methods = [
      { key: `${api}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' },
      { key: `${api}<TResponse = ${responseType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
      { key: `${api}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
      { key: `${api}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' }
    ]
    if (isSnakeCased(api)) {
      methods = methods.concat([
        { key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params?: RequestParams.${Name}, options?: TransportRequestOptions)`, val: 'TransportRequestPromise<ApiResponse<TResponse, TContext>>' },
        { key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
        { key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' },
        { key: `${camelify(api)}<TResponse = ${responseType}, TContext = Context>(params: RequestParams.${Name}, options: TransportRequestOptions, callback: callbackFn<TResponse, TContext>)`, val: 'TransportRequestCallback' }
      ])
    }
    return methods
  }
}
|
||||
|
||||
// true when the spec declares a request body for this api
function hasBody (spec, api) {
  return Boolean(spec[api].body)
}
|
||||
|
||||
/**
 * True when HEAD is the only HTTP verb declared across every url path of
 * the api (such "exists"-style endpoints respond with a boolean).
 */
function isHeadMethod (spec, api) {
  const verbs = new Set()
  for (const path of spec[api].url.paths) {
    for (const verb of path.methods) {
      verbs.add(verb)
    }
  }
  return verbs.size === 1 && verbs.has('HEAD')
}
|
||||
|
||||
/**
 * Loads a JSON api spec from `specFolder` via require (which parses .json
 * files and caches repeated loads), wrapping any failure — missing file or
 * malformed JSON — in an error that names the offending spec.
 */
function readSpec (specFolder, file) {
  try {
    return require(join(specFolder, file))
  } catch (err) {
    // note: the original cause (`err`) is intentionally not propagated
    throw new Error(`Cannot read spec file ${file}`)
  }
}
|
||||
|
||||
// single entry point of this module: the api factory/type generator
module.exports = genFactory
|
||||
@ -1,191 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
/* eslint camelcase: 0 */
|
||||
|
||||
'use strict'
|
||||
|
||||
const deprecatedParameters = require('./patch.json')
|
||||
|
||||
function generate (version, api) {
|
||||
const release = version.charAt(0)
|
||||
let types = `/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { RequestBody, RequestNDBody } from '../lib/Transport'
|
||||
|
||||
export interface Generic {
|
||||
method?: string;
|
||||
filter_path?: string | string[];
|
||||
pretty?: boolean;
|
||||
human?: boolean;
|
||||
error_trace?: boolean;
|
||||
source?: string;
|
||||
}
|
||||
`
|
||||
|
||||
api.forEach(generateRequestType)
|
||||
return types
|
||||
|
||||
// Append to the outer `types` accumulator a TypeScript request interface
// generated from one REST API spec entry (an object with a single key,
// the API name, mapping to its url/params/body description).
function generateRequestType (spec) {
  const api = Object.keys(spec)[0]
  // camelCase the API name: 'async_search.get' -> 'asyncSearchGet'
  const name = api
    .replace(/\.([a-z])/g, k => k[1].toUpperCase())
    .replace(/_([a-z])/g, k => k[1].toUpperCase())

  // NOTE(review): the `{}` default is neither iterable nor has `.reduce`,
  // so this relies on spec[api].url always providing a paths array — confirm.
  const { paths = {} } = spec[api].url
  const { body, params = {} } = spec[api]

  // Compute which url parts are required: a part is required only if it
  // appears in every path. If any path has no parts at all (i.e. a fully
  // static path exists), then no part is required.
  let allParts = []
  let requiredParts = []
  for (const path of paths) {
    if (path.parts) {
      allParts.push(Object.keys(path.parts))
    } else {
      allParts = []
      break
    }
  }
  if (allParts.length > 0) {
    requiredParts = intersect(...allParts)
  }

  // Collect every distinct url part (first occurrence wins) together with
  // its spec value and required flag.
  const parts = paths.reduce((acc, path) => {
    if (!path.parts) return acc
    for (const part in path.parts) {
      if (acc[part] != null) continue
      acc[part] = { key: part, value: path.parts[part], required: requiredParts.includes(part) }
    }
    return acc
  }, {})

  // Query-string parameters that are not already url parts. While mapping,
  // also record the deprecated alias of each parameter, looked up in the
  // outer `deprecatedParameters` table for the current `release`.
  const deprecatedParametersToAdd = []
  const paramsArr = Object.keys(params)
    .filter(k => !Object.keys(parts).includes(k))
    .map(k => {
      if (deprecatedParameters[release] && deprecatedParameters[release][k]) {
        deprecatedParametersToAdd.push({
          key: deprecatedParameters[release][k],
          value: params[k],
          required: params[k].required
        })
      }
      return { key: k, value: params[k], required: params[k].required }
    })

  const partsArr = Object.keys(parts).map(k => parts[k])
  deprecatedParametersToAdd.forEach(k => partsArr.push(k))

  // Render one interface member, e.g. `index?: string | string[];`
  const genLine = e => {
    const optional = e.required ? '' : '?'
    return `${e.key}${optional}: ${getType(e.value.type, e.value.options)};`
  }

  // Bulk-style APIs (ndjson content type) take a different body generic.
  const { content_type } = spec[api].headers
  const bodyGeneric = content_type && content_type.includes('application/x-ndjson') ? 'RequestNDBody' : 'RequestBody'

  const code = `
export interface ${toPascalCase(name)}${body ? `<T = ${bodyGeneric}>` : ''} extends Generic {
  ${partsArr.map(genLine).join('\n  ')}
  ${paramsArr.map(genLine).join('\n  ')}
  ${body ? `body${body.required ? '' : '?'}: T;` : ''}
}
`

  types += '\n'
  // drop lines left empty when an API has no parts/params/body
  types += code.replace(/^\s*\n/gm, '')
}
|
||||
|
||||
// Translate a REST API spec parameter type into its TypeScript
// representation. Unknown types pass through unchanged.
function getType (type, options) {
  if (type === 'list') return 'string | string[]'
  if (type === 'date' || type === 'time' || type === 'timeout') return 'string'
  if (type === 'int' || type === 'double' || type === 'long') return 'number'
  if (type === 'boolean|long') return 'boolean | number'
  if (type !== 'enum') return type

  // Build a union of string literals. When both 'true' and 'false' are
  // present they collapse into a single trailing `boolean` member; a lone
  // 'true' or 'false' stays as a bare literal at its original position.
  const hasTrue = options.includes('true')
  const hasFalse = options.includes('false')
  const collapse = hasTrue && hasFalse
  const members = []
  for (const option of options) {
    if (option === 'true') {
      if (!collapse) members.push(true)
    } else if (option === 'false') {
      if (!collapse) members.push(false)
    } else {
      members.push(`'${option}'`)
    }
  }
  if (collapse) members.push('boolean')
  return members.join(' | ')
}
|
||||
}
|
||||
|
||||
/**
 * Intersect any number of arrays: returns the elements of `first` that
 * are present in every array of `rest`. Order and duplicates of `first`
 * are preserved. With no `rest` arrays, `first` is returned as-is.
 */
function intersect (first, ...rest) {
  return rest.reduce((accum, current) => {
    // `includes` is the idiomatic form of `indexOf(x) !== -1`
    return accum.filter(x => current.includes(x))
  }, first)
}
|
||||
|
||||
/**
 * Upper-case the first character of a string: 'asyncSearch' -> 'AsyncSearch'.
 * Returns the empty string unchanged instead of throwing a TypeError on
 * `str[0].toUpperCase()` (the original crashed for '').
 */
function toPascalCase (str) {
  if (str.length === 0) return str
  return str[0].toUpperCase() + str.slice(1)
}
|
||||
|
||||
module.exports = generate
|
||||
@ -1,34 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
'use strict'
|
||||
|
||||
const generate = require('./generateApis')
|
||||
const cloneAndCheckout = require('./clone-es')
|
||||
const genFactory = require('./generateMain')
|
||||
const generateDocs = require('./generateDocs')
|
||||
const generateRequestTypes = require('./generateRequestTypes')
|
||||
|
||||
module.exports = {
|
||||
generate,
|
||||
cloneAndCheckout,
|
||||
genFactory,
|
||||
generateDocs,
|
||||
generateRequestTypes
|
||||
}
|
||||
@ -1,14 +0,0 @@
|
||||
{
|
||||
"6": {
|
||||
"_source_includes": "_source_include",
|
||||
"_source_excludes": "_source_exclude"
|
||||
},
|
||||
"7": {
|
||||
"_source_includes": "_source_include",
|
||||
"_source_excludes": "_source_exclude"
|
||||
},
|
||||
"8": {
|
||||
"_source_includes": "_source_include",
|
||||
"_source_excludes": "_source_exclude"
|
||||
}
|
||||
}
|
||||
@ -1,21 +0,0 @@
|
||||
#!/bin/bash

# Poll an Elasticsearch cluster until its health reports green (HTTP 200),
# or give up after a fixed number of attempts.

TEST_ES_SERVER=${TEST_ES_SERVER:-"http://localhost:9200"}

attempt_counter=0
max_attempts=5
url="${TEST_ES_SERVER}/_cluster/health?wait_for_status=green&timeout=50s"

echo "Waiting for Elasticsearch..."
while [[ "$(curl -s -o /dev/null -w '%{http_code}' --max-time 55 "$url")" != "200" ]]; do
  if [ ${attempt_counter} -eq ${max_attempts} ]; then
    # bug fix: plain `echo` does not expand \n in bash; use printf
    printf '\nCouldn'\''t connect to Elasticsearch\n'
    exit 1
  fi

  printf '.'
  attempt_counter=$(($attempt_counter+1))
  sleep 5
done

printf '\nReady\n'
||||
Reference in New Issue
Block a user