Compare commits: v8.18.1 ... drop-body-
66 Commits
| SHA1 |
|---|
| 70aae3b44f |
| 444975b4e6 |
| 341168d2a1 |
| f835fa3b12 |
| e688f36396 |
| b8c3ac446e |
| 3a288cf86b |
| 100be27ad1 |
| c490dd0821 |
| e992c329c3 |
| bfdae66333 |
| ed3cace127 |
| f33aa8cccd |
| 7cb973a206 |
| a4315a905e |
| 6447fc10bf |
| e9c2f8b0af |
| 15b9ee2f06 |
| e30e964131 |
| 0f187f47c4 |
| 101f34bd5e |
| ec0c561e36 |
| c1e90b12f0 |
| 5cb670256e |
| 86f488f68f |
| 6009fab7fe |
| 26ae260058 |
| fbbbece711 |
| a30c3dca2d |
| 36cfacc409 |
| 6dc83cd33e |
| 7c7ce29127 |
| 2b890af355 |
| 421f953b00 |
| c5e4107181 |
| 5880c84c13 |
| 290639d168 |
| 0b90613694 |
| 1ad057abcc |
| 44d890ec57 |
| 2b2a2f03e6 |
| 7bcd75bdb0 |
| 2455dac4e5 |
| edb5563bf8 |
| 11939fd22c |
| e0c613f898 |
| 20f2c740cd |
| 97bdca22d8 |
| a7123f807d |
| 20ac2a637e |
| e287c1edd9 |
| 90d43f4f28 |
| 572927b4f1 |
| 86b4d4e2f9 |
| 8e79bf847a |
| cef328c93d |
| c3247d0c66 |
| e9fdcb0647 |
| 82acfc33a9 |
| 661caf8422 |
| 3430734fe0 |
| 810e009202 |
| c274b1b32f |
| 428a7b023d |
| aad41df231 |
| 34704b2e5c |
@@ -25,7 +25,7 @@ steps:
       provider: "gcp"
       image: family/core-ubuntu-2204
     plugins:
-      - junit-annotate#v2.4.1:
+      - junit-annotate#v2.6.0:
          artifacts: "junit-output/junit-*.xml"
          job-uuid-file-pattern: "junit-(.*).xml"
          fail-build-on-error: true
26  .github/stale.yml (vendored)
@@ -1,26 +0,0 @@
-# Number of days of inactivity before an issue becomes stale
-daysUntilStale: 15
-
-# Number of days of inactivity before a stale issue is closed
-daysUntilClose: 7
-
-# Issues with these labels will never be considered stale
-exemptLabels:
-  - "discussion"
-  - "feature request"
-  - "bug"
-  - "todo"
-  - "good first issue"
-
-# Label to use when marking an issue as stale
-staleLabel: stale
-
-# Comment to post when marking an issue as stale. Set to `false` to disable
-markComment: |
-  We understand that this might be important for you, but this issue has been automatically marked as stale because it has not had recent activity either from our end or yours.
-  It will be closed if no further activity occurs, please write a comment if you would like to keep this going.
-
-  Note: in the past months we have built a new client, that has just landed in master. If you want to open an issue or a pr for the legacy client, you should do that in https://github.com/elastic/elasticsearch-js-legacy
-
-# Comment to post when closing a stale issue. Set to `false` to disable
-closeComment: false
11  .github/workflows/npm-publish.yml (vendored)
@@ -23,15 +23,18 @@ jobs:
       - run: npm install -g npm
       - run: npm install
       - run: npm test
-      - run: npm publish --provenance --access public
+      - run: npm publish --provenance --access public --tag alpha
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-      - run: |
+      - name: Publish version on GitHub
+        run: |
           version=$(jq -r .version package.json)
           gh release create \
-            -n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
+            -n "This is a 9.0.0 pre-release alpha. Changes may not be stable." \
+            --latest=false \
+            --prerelease \
             --target "$BRANCH_NAME" \
-            -t "v$version" \
+            --title "v$version" \
            "v$version"
         env:
           BRANCH_NAME: ${{ github.event.inputs.branch }}
2  .github/workflows/serverless-patch.yml (vendored)
@@ -42,7 +42,7 @@ jobs:
       - name: Apply patch from stack to serverless
         id: apply-patch
         run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
-      - uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
+      - uses: peter-evans/create-pull-request@67ccf781d68cd99b580ae25a5c18a1cc84ffff1f # v7
         with:
           token: ${{ secrets.GH_TOKEN }}
           path: serverless
12  .github/workflows/stale.yml (vendored)
@@ -1,21 +1,21 @@
 ---
-name: 'Close stale issues and PRs'
+name: "Close stale issues and PRs"
 on:
   schedule:
-    - cron: '30 1 * * *'
+    - cron: "30 1 * * *"
 
 jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@1160a2240286f5da8ec72b1c0816ce2481aabf84 # v8
+      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9
        with:
          stale-issue-label: stale
          stale-pr-label: stale
          days-before-stale: 90
          days-before-close: 14
-          exempt-issue-labels: 'good first issue'
+          exempt-issue-labels: "good first issue,tracking"
          close-issue-label: closed-stale
          close-pr-label: closed-stale
-          stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
-          stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
+          stale-issue-message: "This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
+          stale-pr-message: "This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
@@ -167,19 +167,16 @@ const client = new Client({
 ----
 
 |`nodeFilter`
-a|`function` - Takes a `Connection` and returns `true` if it can be sent a request, otherwise `false`. +
+a|`function` - Filters which node not to use for a request. +
 _Default:_
 [source,js]
 ----
-function defaultNodeFilter (conn) {
-  if (conn.roles != null) {
-    if (
-      // avoid master-only nodes
-      conn.roles.master &&
-      !conn.roles.data &&
-      !conn.roles.ingest &&
-      !conn.roles.ml
-    ) return false
+function defaultNodeFilter (node) {
+  // avoid master only nodes
+  if (node.roles.master === true &&
+      node.roles.data === false &&
+      node.roles.ingest === false) {
+    return false
   }
   return true
 }
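The new default filter only skips dedicated master-eligible nodes. As a rough usage sketch (the node URL and the predicate are illustrative, not taken from the diff), a custom `nodeFilter` can be supplied when instantiating the client:

[source,js]
----
const { Client } = require('@elastic/elasticsearch')

const client = new Client({
  node: 'http://localhost:9200',
  // send requests only to nodes that hold data; adjust the predicate as needed
  nodeFilter: (node) => node.roles.data === true
})
----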
@@ -2,35 +2,15 @@
 == Release notes
 
 [discrete]
-=== 8.18.1
+=== 9.0.0
 
 [discrete]
-==== Fixes
+==== Breaking changes
 
 [discrete]
-===== Fix broken node roles and node filter
+===== Drop support for deprecated `body` parameter
 
-The docs note a `nodeFilter` option on the client that will, by default, filter the nodes based on any `roles` values that are set at instantiation. At some point, this functionality was partially disabled. This brings the feature back, ensuring that it matches what the documentation has said it does all along.
-
-[discrete]
-=== 8.18.0
-
-[discrete]
-==== Features
-
-[discrete]
-===== Support for Elasticsearch `v8.18`
-
-You can find all the API changes
-https://www.elastic.co/guide/en/elasticsearch/reference/8.18/release-notes-8.18.0.html[here].
-
-[discrete]
-==== Fixes
-
-[discrete]
-===== Improved Cloud ID parsing
-
-When using a Cloud ID as the `cloud` parameter to instantiate the client, that ID was assumed to be in the correct format. New assertions have been added to verify that format and throw a `ConfigurationError` if it is invalid. See https://github.com/elastic/elasticsearch-js/issues/2694[#2694].
+In 8.0, the top-level `body` parameter that was available on all API functions <<remove-body-key,was deprecated>>. In 9.0 this property is completely removed.
 
 [discrete]
 === 8.17.0
@@ -699,6 +679,7 @@ ac.abort()
 ----
 
 [discrete]
+[[remove-body-key]]
 ===== Remove the body key from the request
 
 *Breaking: Yes* | *Migration effort: Small*
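To make the scope of this breaking change concrete, here is a minimal before/after sketch (the index name and query are illustrative):

[source,js]
----
// 8.x (deprecated): request options nested under a `body` key
await client.search({
  index: 'my-index',
  body: { query: { match_all: {} } }
})

// 9.0: the same options are passed at the top level
await client.search({
  index: 'my-index',
  query: { match_all: {} }
})
----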
@@ -1,11 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.getDataStream({
-  name: "my-data-stream",
-  filter_path: "data_streams.indices.index_name",
-});
-console.log(response);
-----
@@ -1,10 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.getMapping({
-  index: "kibana_sample_data_ecommerce",
-});
-console.log(response);
-----
@@ -1,42 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-rank-vectors-bit",
-  mappings: {
-    properties: {
-      my_vector: {
-        type: "rank_vectors",
-        element_type: "bit",
-      },
-    },
-  },
-});
-console.log(response);
-
-const response1 = await client.bulk({
-  index: "my-rank-vectors-bit",
-  refresh: "true",
-  operations: [
-    {
-      index: {
-        _id: "1",
-      },
-    },
-    {
-      my_vector: [127, -127, 0, 1, 42],
-    },
-    {
-      index: {
-        _id: "2",
-      },
-    },
-    {
-      my_vector: "8100012a7f",
-    },
-  ],
-});
-console.log(response1);
-----
@@ -5,8 +5,10 @@
 ----
 const response = await client.cluster.putSettings({
   persistent: {
-    "cluster.routing.allocation.disk.watermark.low": "90%",
-    "cluster.routing.allocation.disk.watermark.high": "95%",
+    "cluster.routing.allocation.disk.watermark.low": "100gb",
+    "cluster.routing.allocation.disk.watermark.high": "50gb",
+    "cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
+    "cluster.info.update.interval": "1m",
  },
 });
 console.log(response);
@@ -1,20 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.transport.request({
-  method: "POST",
-  path: "/_inference/chat_completion/openai-completion/_stream",
-  body: {
-    model: "gpt-4o",
-    messages: [
-      {
-        role: "user",
-        content: "What is Elastic?",
-      },
-    ],
-  },
-});
-console.log(response);
-----
@@ -1,11 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.addBlock({
-  index: ".ml-anomalies-custom-example",
-  block: "read_only",
-});
-console.log(response);
-----
@@ -6,15 +6,14 @@
 const response = await client.search({
   index: "test-index",
   query: {
-    match: {
-      my_semantic_field: "Which country is Paris in?",
-    },
-  },
-  highlight: {
-    fields: {
-      my_semantic_field: {
-        number_of_fragments: 2,
-        order: "score",
+    nested: {
+      path: "inference_field.inference.chunks",
+      query: {
+        sparse_vector: {
+          field: "inference_field.inference.chunks.embeddings",
+          inference_id: "my-inference-id",
+          query: "mountain lake",
+        },
+      },
     },
   },
@@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
     {
       attachment: {
         field: "data",
-        remove_binary: true,
+        remove_binary: false,
      },
    },
  ],
@@ -1,19 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.security.queryRole({
-  query: {
-    bool: {
-      must_not: {
-        term: {
-          "metadata._reserved": true,
-        },
-      },
-    },
-  },
-  sort: ["name"],
-});
-console.log(response);
-----
@@ -14,7 +14,6 @@ const response = await client.indices.putSettings({
     "index.search.slowlog.threshold.fetch.info": "800ms",
     "index.search.slowlog.threshold.fetch.debug": "500ms",
     "index.search.slowlog.threshold.fetch.trace": "200ms",
-    "index.search.slowlog.include.user": true,
   },
 });
 console.log(response);
@@ -1,67 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-rank-vectors-bit",
-  mappings: {
-    properties: {
-      my_vector: {
-        type: "rank_vectors",
-        element_type: "bit",
-      },
-    },
-  },
-});
-console.log(response);
-
-const response1 = await client.bulk({
-  index: "my-rank-vectors-bit",
-  refresh: "true",
-  operations: [
-    {
-      index: {
-        _id: "1",
-      },
-    },
-    {
-      my_vector: [127, -127, 0, 1, 42],
-    },
-    {
-      index: {
-        _id: "2",
-      },
-    },
-    {
-      my_vector: "8100012a7f",
-    },
-  ],
-});
-console.log(response1);
-
-const response2 = await client.search({
-  index: "my-rank-vectors-bit",
-  query: {
-    script_score: {
-      query: {
-        match_all: {},
-      },
-      script: {
-        source: "maxSimDotProduct(params.query_vector, 'my_vector')",
-        params: {
-          query_vector: [
-            [
-              0.35, 0.77, 0.95, 0.15, 0.11, 0.08, 0.58, 0.06, 0.44, 0.52, 0.21,
-              0.62, 0.65, 0.16, 0.64, 0.39, 0.93, 0.06, 0.93, 0.31, 0.92, 0,
-              0.66, 0.86, 0.92, 0.03, 0.81, 0.31, 0.2, 0.92, 0.95, 0.64, 0.19,
-              0.26, 0.77, 0.64, 0.78, 0.32, 0.97, 0.84,
-            ],
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response2);
-----
@@ -1,11 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.addBlock({
-  index: ".ml-anomalies-custom-example",
-  block: "write",
-});
-console.log(response);
-----
@@ -1,26 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "my-rank-vectors-float",
-  query: {
-    script_score: {
-      query: {
-        match_all: {},
-      },
-      script: {
-        source: "maxSimDotProduct(params.query_vector, 'my_vector')",
-        params: {
-          query_vector: [
-            [0.5, 10, 6],
-            [-0.5, 10, 10],
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,35 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.ingest.putPipeline({
-  id: "attachment",
-  description: "Extract attachment information including original binary",
-  processors: [
-    {
-      attachment: {
-        field: "data",
-        remove_binary: false,
-      },
-    },
-  ],
-});
-console.log(response);
-
-const response1 = await client.index({
-  index: "my-index-000001",
-  id: "my_id",
-  pipeline: "attachment",
-  document: {
-    data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
-  },
-});
-console.log(response1);
-
-const response2 = await client.get({
-  index: "my-index-000001",
-  id: "my_id",
-});
-console.log(response2);
-----
@@ -4,10 +4,9 @@
 [source, js]
 ----
 const response = await client.indices.putSettings({
-  index: "*",
+  index: "my-index-000001",
   settings: {
-    "index.indexing.slowlog.include.user": true,
     "index.indexing.slowlog.threshold.index.warn": "30s",
   },
 });
 console.log(response);
@@ -1,23 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "test-index",
-  mappings: {
-    properties: {
-      source_field: {
-        type: "text",
-        fields: {
-          infer_field: {
-            type: "semantic_text",
-            inference_id: ".elser-2-elasticsearch",
-          },
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,28 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "my-index-*",
-  query: {
-    bool: {
-      must: [
-        {
-          match: {
-            "user.id": "kimchy",
-          },
-        },
-      ],
-      must_not: [
-        {
-          terms: {
-            _index: ["my-index-01"],
-          },
-        },
-      ],
-    },
-  },
-});
-console.log(response);
-----
@@ -3,8 +3,8 @@
 
 [source, js]
 ----
-const response = await client.migration.deprecations({
-  index: ".ml-anomalies-*",
+const response = await client.indices.unfreeze({
+  index: "my-index-000001",
 });
 console.log(response);
 ----
@@ -1,31 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.ilm.putLifecycle({
-  name: "my_policy",
-  policy: {
-    phases: {
-      hot: {
-        actions: {
-          rollover: {
-            max_primary_shard_size: "50gb",
-          },
-          searchable_snapshot: {
-            snapshot_repository: "backing_repo",
-            replicate_for: "14d",
-          },
-        },
-      },
-      delete: {
-        min_age: "28d",
-        actions: {
-          delete: {},
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -4,11 +4,9 @@
 [source, js]
 ----
 const response = await client.indices.putSettings({
-  index: ".reindexed-v9-ml-anomalies-custom-example",
+  index: "my-index-000001",
   settings: {
-    index: {
-      number_of_replicas: 0,
-    },
+    "index.search.slowlog.include.user": true,
  },
 });
 console.log(response);
@@ -6,7 +6,6 @@
 const response = await client.indices.resolveCluster({
   name: "not-present,clust*:my-index*,oldcluster:*",
-  ignore_unavailable: "false",
   timeout: "5s",
 });
 console.log(response);
 ----
@@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
     {
       attachment: {
         field: "data",
-        remove_binary: true,
+        remove_binary: false,
      },
    },
  ],
@@ -1,70 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "movies",
-  size: 10,
-  retriever: {
-    rescorer: {
-      rescore: {
-        window_size: 50,
-        query: {
-          rescore_query: {
-            script_score: {
-              query: {
-                match_all: {},
-              },
-              script: {
-                source:
-                  "cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0",
-                params: {
-                  queryVector: [-0.5, 90, -10, 14.8, -156],
-                },
-              },
-            },
-          },
-        },
-      },
-      retriever: {
-        rrf: {
-          rank_window_size: 100,
-          retrievers: [
-            {
-              standard: {
-                query: {
-                  sparse_vector: {
-                    field: "plot_embedding",
-                    inference_id: "my-elser-model",
-                    query: "films that explore psychological depths",
-                  },
-                },
-              },
-            },
-            {
-              standard: {
-                query: {
-                  multi_match: {
-                    query: "crime",
-                    fields: ["plot", "title"],
-                  },
-                },
-              },
-            },
-            {
-              knn: {
-                field: "vector",
-                query_vector: [10, 22, 77],
-                k: 10,
-                num_candidates: 10,
-              },
-            },
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,23 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-index",
-  settings: {
-    index: {
-      number_of_shards: 3,
-      "blocks.write": true,
-    },
-  },
-  mappings: {
-    properties: {
-      field1: {
-        type: "text",
-      },
-    },
-  },
-});
-console.log(response);
-----
23  docs/doc_examples/38ba93890494bfa7beece58dffa44f98.asciidoc (new file)
@@ -0,0 +1,23 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.bulk({
+  index: "test-index",
+  operations: [
+    {
+      update: {
+        _id: "1",
+      },
+    },
+    {
+      doc: {
+        infer_field: "updated inference field",
+        source_field: "updated source field",
+      },
+    },
+  ],
+});
+console.log(response);
+----
@@ -1,19 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: ".ml-anomalies-custom-example",
-  size: 0,
-  aggs: {
-    job_ids: {
-      terms: {
-        field: "job_id",
-        size: 100,
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,61 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "retrievers_example",
-  retriever: {
-    linear: {
-      retrievers: [
-        {
-          retriever: {
-            standard: {
-              query: {
-                function_score: {
-                  query: {
-                    term: {
-                      topic: "ai",
-                    },
-                  },
-                  functions: [
-                    {
-                      script_score: {
-                        script: {
-                          source: "doc['timestamp'].value.millis",
-                        },
-                      },
-                    },
-                  ],
-                  boost_mode: "replace",
-                },
-              },
-            },
-          },
-          sort: {
-            timestamp: {
-              order: "asc",
-            },
-          },
-          weight: 2,
-          normalizer: "minmax",
-        },
-        {
-          retriever: {
-            knn: {
-              field: "vector",
-              query_vector: [0.23, 0.67, 0.89],
-              k: 3,
-              num_candidates: 5,
-            },
-          },
-          weight: 1.5,
-        },
-      ],
-      rank_window_size: 10,
-    },
-  },
-  _source: false,
-});
-console.log(response);
-----
@@ -1,16 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.updateAliases({
-  actions: [
-    {
-      remove_index: {
-        index: "my-index-2099.05.06-000001",
-      },
-    },
-  ],
-});
-console.log(response);
-----
@@ -1,18 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "kibana_sample_data_ecommerce",
-  size: 0,
-  aggs: {
-    order_stats: {
-      stats: {
-        field: "taxful_total_price",
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -6,11 +6,15 @@
 const response = await client.update({
   index: "test",
   id: 1,
-  doc: {
-    product_price: 100,
+  script: {
+    source: "ctx._source.counter += params.count",
+    lang: "painless",
+    params: {
+      count: 4,
+    },
   },
   upsert: {
     product_price: 50,
     counter: 1,
   },
 });
 console.log(response);
@@ -1,47 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.transport.request({
-  method: "POST",
-  path: "/_inference/chat_completion/openai-completion/_stream",
-  body: {
-    messages: [
-      {
-        role: "user",
-        content: [
-          {
-            type: "text",
-            text: "What's the price of a scarf?",
-          },
-        ],
-      },
-    ],
-    tools: [
-      {
-        type: "function",
-        function: {
-          name: "get_current_price",
-          description: "Get the current price of a item",
-          parameters: {
-            type: "object",
-            properties: {
-              item: {
-                id: "123",
-              },
-            },
-          },
-        },
-      },
-    ],
-    tool_choice: {
-      type: "function",
-      function: {
-        name: "get_current_price",
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -3,18 +3,15 @@
 
 [source, js]
 ----
-const response = await client.search({
-  index: "image-index",
+const response = await client.knnSearch({
+  index: "my-index",
   knn: {
-    field: "image-vector",
-    query_vector: [-5, 9, -12],
+    field: "image_vector",
+    query_vector: [0.3, 0.1, 1.2],
     k: 10,
     num_candidates: 100,
-    rescore_vector: {
-      oversample: 2,
-    },
   },
-  fields: ["title", "file-type"],
+  _source: ["name", "file_type"],
 });
 console.log(response);
 ----
@@ -1,16 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "jinaai-index",
-  query: {
-    semantic: {
-      field: "content",
-      query: "who inspired taking care of the sea?",
-    },
-  },
-});
-console.log(response);
-----
@@ -1,10 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.getSettings({
-  index: ".reindexed-v9-ml-anomalies-custom-example",
-});
-console.log(response);
-----
@@ -4,12 +4,16 @@
 [source, js]
 ----
 const response = await client.indices.create({
-  index: "jinaai-index",
+  index: "semantic-embeddings",
   mappings: {
     properties: {
-      content: {
+      semantic_text: {
         type: "semantic_text",
-        inference_id: "jinaai-embeddings",
+        inference_id: "my-elser-endpoint",
+      },
+      content: {
+        type: "text",
+        copy_to: "semantic_text",
      },
    },
  },
@@ -11,7 +11,7 @@ const response = await client.ingest.putPipeline({
       attachment: {
         field: "data",
         properties: ["content", "title"],
-        remove_binary: true,
+        remove_binary: false,
      },
    },
  ],
@@ -1,15 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.putSettings({
-  index: "*",
-  settings: {
-    "index.search.slowlog.include.user": true,
-    "index.search.slowlog.threshold.fetch.warn": "30s",
-    "index.search.slowlog.threshold.query.warn": "30s",
-  },
-});
-console.log(response);
-----
@@ -1,16 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.reindex({
-  wait_for_completion: "false",
-  source: {
-    index: ".ml-anomalies-custom-example",
-  },
-  dest: {
-    index: ".reindexed-v9-ml-anomalies-custom-example",
-  },
-});
-console.log(response);
-----
@@ -1,24 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "my-index-000001",
-  query: {
-    prefix: {
-      full_name: {
-        value: "ki",
-      },
-    },
-  },
-  highlight: {
-    fields: {
-      full_name: {
-        matched_fields: ["full_name._index_prefix"],
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,33 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "kibana_sample_data_ecommerce",
-  size: 0,
-  aggs: {
-    daily_sales: {
-      date_histogram: {
-        field: "order_date",
-        calendar_interval: "day",
-      },
-      aggs: {
-        daily_revenue: {
-          sum: {
-            field: "taxful_total_price",
-          },
-        },
-        smoothed_revenue: {
-          moving_fn: {
-            buckets_path: "daily_revenue",
-            window: 3,
-            script: "MovingFunctions.unweightedAvg(values)",
-          },
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
26  docs/doc_examples/74b229a6e020113e5749099451979c89.asciidoc (new file)
@@ -0,0 +1,26 @@
+// This file is autogenerated, DO NOT EDIT
+// Use `node scripts/generate-docs-examples.js` to generate the docs examples
+
+[source, js]
+----
+const response = await client.search({
+  index: "test-index",
+  query: {
+    nested: {
+      path: "inference_field.inference.chunks",
+      query: {
+        knn: {
+          field: "inference_field.inference.chunks.embeddings",
+          query_vector_builder: {
+            text_embedding: {
+              model_id: "my_inference_id",
+              model_text: "mountain lake",
+            },
+          },
+        },
+      },
+    },
+  },
+});
+console.log(response);
+----
@@ -1,35 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  query: {
-    intervals: {
-      my_text: {
-        all_of: {
-          ordered: false,
-          max_gaps: 1,
-          intervals: [
-            {
-              match: {
-                query: "my favorite food",
-                max_gaps: 0,
-                ordered: true,
-              },
-            },
-            {
-              match: {
-                query: "cold porridge",
-                max_gaps: 4,
-                ordered: true,
-              },
-            },
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,37 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "kibana_sample_data_ecommerce",
-  size: 0,
-  aggs: {
-    daily_sales: {
-      date_histogram: {
-        field: "order_date",
-        calendar_interval: "day",
-        format: "yyyy-MM-dd",
-      },
-      aggs: {
-        revenue: {
-          sum: {
-            field: "taxful_total_price",
-          },
-        },
-        unique_customers: {
-          cardinality: {
-            field: "customer_id",
-          },
-        },
-        avg_basket_size: {
-          avg: {
-            field: "total_quantity",
-          },
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,34 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.transport.request({
-  method: "POST",
-  path: "/_inference/chat_completion/openai-completion/_stream",
-  body: {
-    messages: [
-      {
-        role: "assistant",
-        content: "Let's find out what the weather is",
-        tool_calls: [
-          {
-            id: "call_KcAjWtAww20AihPHphUh46Gd",
-            type: "function",
-            function: {
-              name: "get_current_weather",
-              arguments: '{"location":"Boston, MA"}',
-            },
-          },
-        ],
-      },
-      {
-        role: "tool",
-        content: "The weather is cold",
-        tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
-      },
-    ],
-  },
-});
-console.log(response);
-----
@@ -11,8 +11,6 @@ const response = await client.indices.putSettings({
     "index.indexing.slowlog.threshold.index.debug": "2s",
     "index.indexing.slowlog.threshold.index.trace": "500ms",
-    "index.indexing.slowlog.source": "1000",
-    "index.indexing.slowlog.reformat": true,
     "index.indexing.slowlog.include.user": true,
   },
 });
 console.log(response);
@@ -7,14 +7,14 @@ const response = await client.indices.create({
   index: "test-index",
   mappings: {
     properties: {
-      infer_field: {
-        type: "semantic_text",
-        inference_id: "my-elser-endpoint",
-      },
       source_field: {
         type: "text",
         copy_to: "infer_field",
       },
+      infer_field: {
+        type: "semantic_text",
+        inference_id: ".elser-2-elasticsearch",
+      },
     },
   },
 });
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.query({
|
||||
query:
|
||||
'\nFROM library\n| EVAL year = DATE_EXTRACT("year", release_date)\n| WHERE page_count > ? AND match(author, ?, {"minimum_should_match": ?})\n| LIMIT 5\n',
|
||||
params: [300, "Frank Herbert", 2],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@@ -3,8 +3,8 @@
 
 [source, js]
 ----
-const response = await client.indices.getAlias({
-  index: ".ml-anomalies-custom-example",
+const response = await client.security.queryRole({
+  sort: ["name"],
 });
 console.log(response);
 ----
@@ -1,39 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "kibana_sample_data_ecommerce",
-  size: 0,
-  aggs: {
-    categories: {
-      terms: {
-        field: "category.keyword",
-        size: 5,
-        order: {
-          total_revenue: "desc",
-        },
-      },
-      aggs: {
-        total_revenue: {
-          sum: {
-            field: "taxful_total_price",
-          },
-        },
-        avg_order_value: {
-          avg: {
-            field: "taxful_total_price",
-          },
-        },
-        total_items: {
-          sum: {
-            field: "total_quantity",
-          },
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -5,11 +5,16 @@
 ----
 const response = await client.inference.put({
   task_type: "sparse_embedding",
-  inference_id: "elser-model-eis",
+  inference_id: "my-elser-endpoint",
   inference_config: {
-    service: "elastic",
+    service: "elser",
     service_settings: {
-      model_name: "elser",
+      adaptive_allocations: {
+        enabled: true,
+        min_number_of_allocations: 3,
+        max_number_of_allocations: 10,
+      },
+      num_threads: 1,
    },
  },
 });
@@ -1,42 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.bulk({
-  index: "jinaai-index",
-  operations: [
-    {
-      index: {
-        _index: "jinaai-index",
-        _id: "1",
-      },
-    },
-    {
-      content:
-        "Sarah Johnson is a talented marine biologist working at the Oceanographic Institute. Her groundbreaking research on coral reef ecosystems has garnered international attention and numerous accolades.",
-    },
-    {
-      index: {
-        _index: "jinaai-index",
-        _id: "2",
-      },
-    },
-    {
-      content:
-        "She spends months at a time diving in remote locations, meticulously documenting the intricate relationships between various marine species. ",
-    },
-    {
-      index: {
-        _index: "jinaai-index",
-        _id: "3",
-      },
-    },
-    {
-      content:
-        "Her dedication to preserving these delicate underwater environments has inspired a new generation of conservationists.",
-    },
-  ],
-});
-console.log(response);
-----
@@ -30,13 +30,6 @@ const response = await client.search({
     ],
   },
 },
-  highlight: {
-    fields: {
-      semantic_text: {
-        number_of_fragments: 2,
-      },
-    },
-  },
 });
 console.log(response);
 ----
@@ -1,30 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-rank-vectors-byte",
-  mappings: {
-    properties: {
-      my_vector: {
-        type: "rank_vectors",
-        element_type: "byte",
-      },
-    },
-  },
-});
-console.log(response);
-
-const response1 = await client.index({
-  index: "my-rank-vectors-byte",
-  id: 1,
-  document: {
-    my_vector: [
-      [1, 2, 3],
-      [4, 5, 6],
-    ],
-  },
-});
-console.log(response1);
-----
@@ -5,7 +5,7 @@
 ----
 const response = await client.cluster.putSettings({
   persistent: {
-    "migrate.data_stream_reindex_max_request_per_second": 10000,
+    "cluster.routing.allocation.disk.watermark.low": "30gb",
  },
 });
 console.log(response);
@@ -10,8 +10,7 @@ const response = await client.inference.put({
     service: "openai",
     service_settings: {
       api_key: "<api_key>",
-      model_id: "text-embedding-3-small",
-      dimensions: 128,
+      model_id: "text-embedding-ada-002",
    },
  },
 });
@@ -7,7 +7,7 @@ const response = await client.inference.put({
   task_type: "sparse_embedding",
   inference_id: "elser_embeddings",
   inference_config: {
-    service: "elasticsearch",
+    service: "elser",
     service_settings: {
       num_allocations: 1,
       num_threads: 1,
@@ -1,12 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.cat.indices({
-  index: ".ml-anomalies-custom-example",
-  v: "true",
-  h: "index,store.size",
-});
-console.log(response);
-----
@@ -1,12 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.get({
-  index: ".migrated-ds-my-data-stream-2025.01.23-000001",
-  human: "true",
-  filter_path: "*.settings.index.version.created_string",
-});
-console.log(response);
-----
@@ -14,7 +14,7 @@ const response = await client.ingest.putPipeline({
       attachment: {
         target_field: "_ingest._value.attachment",
         field: "_ingest._value.data",
-        remove_binary: true,
+        remove_binary: false,
      },
    },
  },
@@ -1,18 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "kibana_sample_data_ecommerce",
-  size: 0,
-  aggs: {
-    avg_order_value: {
-      avg: {
-        field: "taxful_total_price",
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,21 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "kibana_sample_data_ecommerce",
-  size: 0,
-  aggs: {
-    daily_orders: {
-      date_histogram: {
-        field: "order_date",
-        calendar_interval: "day",
-        format: "yyyy-MM-dd",
-        min_doc_count: 0,
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -12,7 +12,7 @@ const response = await client.ingest.putPipeline({
       field: "data",
       indexed_chars: 11,
      indexed_chars_field: "max_size",
-      remove_binary: true,
+      remove_binary: false,
    },
  },
 ],
@@ -1,12 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.getSettings({
-  index: "_all",
-  expand_wildcards: "all",
-  filter_path: "*.settings.index.*.slowlog",
-});
-console.log(response);
-----
@@ -1,22 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "kibana_sample_data_ecommerce",
-  size: 0,
-  aggs: {
-    sales_by_category: {
-      terms: {
-        field: "category.keyword",
-        size: 5,
-        order: {
-          _count: "desc",
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -1,31 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "kibana_sample_data_ecommerce",
-  size: 0,
-  aggs: {
-    daily_sales: {
-      date_histogram: {
-        field: "order_date",
-        calendar_interval: "day",
-      },
-      aggs: {
-        revenue: {
-          sum: {
-            field: "taxful_total_price",
-          },
-        },
-        cumulative_revenue: {
-          cumulative_sum: {
-            buckets_path: "revenue",
-          },
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -5,9 +5,6 @@
 ----
 const response = await client.indices.create({
   index: "retrievers_example_nested",
-  settings: {
-    number_of_shards: 1,
-  },
   mappings: {
     properties: {
       nested_field: {
@@ -21,9 +18,6 @@ const response = await client.indices.create({
         dims: 3,
         similarity: "l2_norm",
         index: true,
-        index_options: {
-          type: "flat",
-        },
      },
    },
  },
@@ -1,22 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.inference.put({
-  task_type: "rerank",
-  inference_id: "jinaai-rerank",
-  inference_config: {
-    service: "jinaai",
-    service_settings: {
-      api_key: "<api_key>",
-      model_id: "jina-reranker-v2-base-multilingual",
-    },
-    task_settings: {
-      top_n: 10,
-      return_documents: true,
-    },
-  },
-});
-console.log(response);
-----
@@ -1,11 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.esql.query({
-  query:
-    '\nFROM library\n| WHERE match(author, "Frank Herbert", {"minimum_should_match": 2, "operator": "AND"})\n| LIMIT 5\n',
-});
-console.log(response);
-----
@@ -1,35 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  query: {
-    intervals: {
-      my_text: {
-        all_of: {
-          ordered: true,
-          max_gaps: 1,
-          intervals: [
-            {
-              match: {
-                query: "my favorite food",
-                max_gaps: 0,
-                ordered: true,
-              },
-            },
-            {
-              match: {
-                query: "cold porridge",
-                max_gaps: 4,
-                ordered: true,
-              },
-            },
-          ],
-        },
-      },
-    },
-  },
-});
-console.log(response);
-----
@@ -6,13 +6,13 @@
 const response = await client.indices.create({
   index: "test-index",
   query: {
-    match: {
-      my_field: "Which country is Paris in?",
+    semantic: {
+      field: "my_semantic_field",
     },
   },
   highlight: {
     fields: {
-      my_field: {
+      my_semantic_field: {
+        type: "semantic",
         number_of_fragments: 2,
         order: "score",
@@ -1,11 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.cluster.state({
-  metric: "metadata",
-  filter_path: "metadata.indices.*.system",
-});
-console.log(response);
-----
@@ -1,28 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "jinaai-index",
-  retriever: {
-    text_similarity_reranker: {
-      retriever: {
-        standard: {
-          query: {
-            semantic: {
-              field: "content",
-              query: "who inspired taking care of the sea?",
-            },
-          },
-        },
-      },
-      field: "content",
-      rank_window_size: 100,
-      inference_id: "jinaai-rerank",
-      inference_text: "who inspired taking care of the sea?",
-    },
-  },
-});
-console.log(response);
-----
@@ -1,44 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.search({
-  index: "retrievers_example",
-  retriever: {
-    linear: {
-      retrievers: [
-        {
-          retriever: {
-            standard: {
-              query: {
-                query_string: {
-                  query: "(information retrieval) OR (artificial intelligence)",
-                  default_field: "text",
-                },
-              },
-            },
-          },
-          weight: 2,
-          normalizer: "minmax",
-        },
-        {
-          retriever: {
-            knn: {
-              field: "vector",
-              query_vector: [0.23, 0.67, 0.89],
-              k: 3,
-              num_candidates: 5,
-            },
-          },
-          weight: 1.5,
-          normalizer: "minmax",
-        },
-      ],
-      rank_window_size: 10,
-    },
-  },
-  _source: false,
-});
-console.log(response);
-----
@@ -1,17 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.inference.put({
-  task_type: "chat_completion",
-  inference_id: "chat-completion-endpoint",
-  inference_config: {
-    service: "elastic",
-    service_settings: {
-      model_id: "model-1",
-    },
-  },
-});
-console.log(response);
-----
@@ -1,57 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.updateAliases({
-  actions: [
-    {
-      add: {
-        index: ".reindexed-v9-ml-anomalies-custom-example",
-        alias: ".ml-anomalies-example1",
-        filter: {
-          term: {
-            job_id: {
-              value: "example1",
-            },
-          },
-        },
-        is_hidden: true,
-      },
-    },
-    {
-      add: {
-        index: ".reindexed-v9-ml-anomalies-custom-example",
-        alias: ".ml-anomalies-example2",
-        filter: {
-          term: {
-            job_id: {
-              value: "example2",
-            },
-          },
-        },
-        is_hidden: true,
-      },
-    },
-    {
-      remove: {
-        index: ".ml-anomalies-custom-example",
-        aliases: ".ml-anomalies-*",
-      },
-    },
-    {
-      remove_index: {
-        index: ".ml-anomalies-custom-example",
-      },
-    },
-    {
-      add: {
-        index: ".reindexed-v9-ml-anomalies-custom-example",
-        alias: ".ml-anomalies-custom-example",
-        is_hidden: true,
-      },
-    },
-  ],
-});
-console.log(response);
-----
@@ -3,13 +3,11 @@
 
 [source, js]
 ----
-const response = await client.inference.put({
+const response = await client.inference.inference({
   task_type: "my-inference-endpoint",
   inference_id: "_update",
-  inference_config: {
-    service_settings: {
-      api_key: "<API_KEY>",
-    },
-  },
+  service_settings: {
+    api_key: "<API_KEY>",
+  },
 });
 console.log(response);
@@ -4,11 +4,9 @@
 [source, js]
 ----
 const response = await client.indices.putSettings({
-  index: ".reindexed-v9-ml-anomalies-custom-example",
+  index: "my-index-000001",
   settings: {
-    index: {
-      number_of_replicas: "<original_number_of_replicas>",
-    },
+    "index.blocks.read_only_allow_delete": null,
  },
 });
 console.log(response);
@@ -1,29 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.indices.create({
-  index: "my-rank-vectors-float",
-  mappings: {
-    properties: {
-      my_vector: {
-        type: "rank_vectors",
-      },
-    },
-  },
-});
-console.log(response);
-
-const response1 = await client.index({
-  index: "my-rank-vectors-float",
-  id: 1,
-  document: {
-    my_vector: [
-      [0.5, 10, 6],
-      [-0.5, 10, 10],
-    ],
-  },
-});
-console.log(response1);
-----
@@ -5,9 +5,6 @@
 ----
 const response = await client.indices.create({
   index: "retrievers_example",
-  settings: {
-    number_of_shards: 1,
-  },
   mappings: {
     properties: {
       vector: {
@@ -15,9 +12,6 @@ const response = await client.indices.create({
         dims: 3,
         similarity: "l2_norm",
         index: true,
-        index_options: {
-          type: "flat",
-        },
       },
       text: {
         type: "text",
@@ -28,9 +22,6 @@ const response = await client.indices.create({
       topic: {
         type: "keyword",
      },
-      timestamp: {
-        type: "date",
-      },
    },
  },
 });
@@ -44,7 +35,6 @@ const response1 = await client.index({
     text: "Large language models are revolutionizing information retrieval by boosting search precision, deepening contextual understanding, and reshaping user experiences in data-rich environments.",
     year: 2024,
     topic: ["llm", "ai", "information_retrieval"],
-    timestamp: "2021-01-01T12:10:30",
  },
 });
 console.log(response1);
@@ -57,7 +47,6 @@ const response2 = await client.index({
     text: "Artificial intelligence is transforming medicine, from advancing diagnostics and tailoring treatment plans to empowering predictive patient care for improved health outcomes.",
     year: 2023,
     topic: ["ai", "medicine"],
-    timestamp: "2022-01-01T12:10:30",
  },
 });
 console.log(response2);
@@ -70,7 +59,6 @@ const response3 = await client.index({
     text: "AI is redefining security by enabling advanced threat detection, proactive risk analysis, and dynamic defenses against increasingly sophisticated cyber threats.",
     year: 2024,
     topic: ["ai", "security"],
-    timestamp: "2023-01-01T12:10:30",
  },
 });
 console.log(response3);
@@ -83,7 +71,6 @@ const response4 = await client.index({
     text: "Elastic introduces Elastic AI Assistant, the open, generative AI sidekick powered by ESRE to democratize cybersecurity and enable users of every skill level.",
     year: 2023,
     topic: ["ai", "elastic", "assistant"],
-    timestamp: "2024-01-01T12:10:30",
  },
 });
 console.log(response4);
@@ -96,7 +83,6 @@ const response5 = await client.index({
     text: "Learn how to spin up a deployment of our hosted Elasticsearch Service and use Elastic Observability to gain deeper insight into the behavior of your applications and systems.",
     year: 2024,
     topic: ["documentation", "observability", "elastic"],
-    timestamp: "2025-01-01T12:10:30",
  },
 });
 console.log(response5);
@@ -12,7 +12,7 @@ const response = await client.ingest.putPipeline({
       field: "data",
       indexed_chars: 11,
      indexed_chars_field: "max_size",
-      remove_binary: true,
+      remove_binary: false,
    },
  },
 ],
@@ -12,13 +12,6 @@ const response = await client.search({
     fields: ["my_field", "my_field._2gram", "my_field._3gram"],
   },
 },
-  highlight: {
-    fields: {
-      my_field: {
-        matched_fields: ["my_field._index_prefix"],
-      },
-    },
-  },
 });
 console.log(response);
 ----
@@ -1,18 +0,0 @@
-// This file is autogenerated, DO NOT EDIT
-// Use `node scripts/generate-docs-examples.js` to generate the docs examples
-
-[source, js]
-----
-const response = await client.inference.put({
-  task_type: "text_embedding",
-  inference_id: "jinaai-embeddings",
-  inference_config: {
-    service: "jinaai",
-    service_settings: {
-      model_id: "jina-embeddings-v3",
-      api_key: "<api_key>",
-    },
-  },
-});
-console.log(response);
-----
@@ -97,7 +97,7 @@ client.diagnostic.on('request', (err, result) => {
----

|`deserialization`
-a|Emitted before starting deserialization and decompression. If you want to measure this phase duration, you should measure the time elapsed between this event and `response`. This event might not be emitted in certain situations, like: when `asStream` is set to true; a response is terminated early due to content length being too large; or a response is terminated early by an `AbortController`.
+a|Emitted before starting deserialization and decompression. If you want to measure this phase duration, you should measure the time elapsed between this event and `response`. _(This event might not be emitted in certain situations)_.
[source,js]
----
client.diagnostic.on('deserialization', (err, result) => {
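A minimal sketch of combining the two events to time this phase, assuming both events fire for a given request and that `result.meta.request.id` is available for correlation:

[source,js]
----
// correlate the two events by request id and log the elapsed time;
// if 'deserialization' was never emitted for a request, nothing is logged
const deserializationStarts = new Map()

client.diagnostic.on('deserialization', (err, result) => {
  const id = result?.meta?.request?.id
  if (id != null) deserializationStarts.set(id, Date.now())
})

client.diagnostic.on('response', (err, result) => {
  const id = result?.meta?.request?.id
  const start = deserializationStarts.get(id)
  if (start != null) {
    deserializationStarts.delete(id)
    console.log(`deserialization + decompression took ${Date.now() - start}ms`)
  }
})
----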
File diff suppressed because it is too large
11 package.json
@@ -1,7 +1,7 @@
{
  "name": "@elastic/elasticsearch",
- "version": "8.18.1",
- "versionCanary": "8.18.1-canary.0",
+ "version": "9.0.0-alpha.1",
+ "versionCanary": "9.0.0-canary.0",
  "description": "The official Elasticsearch client for Node.js",
  "main": "./index.js",
  "types": "index.d.ts",
@@ -60,10 +60,11 @@
    "@sinonjs/fake-timers": "github:sinonjs/fake-timers#48f089f",
    "@types/debug": "4.1.12",
    "@types/ms": "0.7.34",
-   "@types/node": "22.10.7",
+   "@types/node": "22.10.1",
    "@types/sinonjs__fake-timers": "8.1.5",
    "@types/split2": "4.2.3",
    "@types/stoppable": "1.1.3",
    "@types/tap": "15.0.12",
    "chai": "5.1.2",
    "cross-zip": "4.0.1",
    "desm": "1.3.1",
@@ -83,13 +84,13 @@
    "tap": "21.0.1",
    "ts-node": "10.9.2",
    "ts-standard": "12.0.2",
-   "typescript": "5.7.3",
+   "typescript": "5.7.2",
    "workq": "3.0.0",
    "xmlbuilder2": "3.1.1",
    "zx": "7.2.3"
  },
  "dependencies": {
-   "@elastic/transport": "^8.9.6",
+   "@elastic/transport": "^8.9.1",
    "apache-arrow": "^18.0.0",
    "tslib": "^2.4.0"
  },
29 renovate.json Normal file
@@ -0,0 +1,29 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": [
    "local>elastic/renovate-config"
  ],
  "schedule": [
    "* * * * 0"
  ],
  "packageRules": [
    {
      "matchDepTypes": [
        "devDependencies"
      ],
      "automerge": true,
      "labels": [
        "backport 8.x"
      ]
    },
    {
      "matchPackageNames": [
        "node"
      ],
      "matchManagers": [
        "dockerfile"
      ],
      "enabled": false
    }
  ]
}
54 scripts/codemod/drop-body.test.ts Normal file
@@ -0,0 +1,54 @@
import { Client } from '../..'

const client = new Client({
  node: 'http://localhost:9200',
  auth: { username: 'elastic', password: 'changeme' }
})

async function doThings () {
  // should get fixed by codemod
  await client.closePointInTime({
    body: {
      id: 'foobar'
    }
  })

  await client.asyncSearch.get({
    // @ts-expect-error should get fixed by codemod
    body: {
      id: 'foo'
    }
  })

  // @ts-expect-error should get fixed by codemod
  await client.create({
    id: 'foo',
    body: { index: 'my-index' }
  })

  await client.watcher.putWatch({
    id: 'foo',
    active: true
  })

  const body = { id: 'foo' }
  // @ts-expect-error should get fixed by codemod
  await client.asyncSearch.get({ body })
  await client.asyncSearch.get(body)

  const request = { body }
  // @ts-expect-error should get fixed by codemod
  await client.asyncSearch.get(request)

  const request2 = body
  await client.closePointInTime(request2)

  // some non-client calls
  const x = Math.random()
  console.log(x)
  console.log({ body: 'foo' })
}

doThings()
  .then(() => console.log('done'))
  .catch(() => console.error('uh oh'))
172 scripts/codemod/drop-body.ts Normal file
@@ -0,0 +1,172 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
import ts from 'typescript'
import path from 'node:path'
import minimist from 'minimist'

const apis = [
  'asyncSearch',
  'autoscaling',
  'bulk',
  'capabilities',
  'cat',
  'ccr',
  'clearScroll',
  'closePointInTime',
  'cluster',
  'connector',
  'count',
  'create',
  'danglingIndices',
  'delete',
  'deleteByQuery',
  'deleteByQueryRethrottle',
  'deleteScript',
  'enrich',
  'eql',
  'esql',
  'exists',
  'existsSource',
  'explain',
  'features',
  'fieldCaps',
]

/**
 * Detects whether a node is a `Client` instance identifier
 * @remarks Uses duck-typing by checking that several Elasticsearch APIs exist as members on the identifier
 */
function isClient(node: ts.Identifier) {
  const type = checker.getTypeAtLocation(node)
  const properties = type.getProperties().map(prop => prop.escapedName.toString())

  for (const api of apis) {
    if (!properties.includes(api)) return false
  }
  return true
}

/**
 * Returns true if the call expression node is running a client API function, otherwise false
 */
function isClientExpression(node: ts.CallExpression): boolean {
  let flag = false
  function visitIdentifiers(node: ts.Node) {
    if (ts.isIdentifier(node) && isClient(node)) {
      flag = true
      return
    }
    ts.forEachChild(node, visitIdentifiers)
  }
  visitIdentifiers(node)
  return flag
}

/**
 * Returns an array of all call expressions to `Client` functions
 */
function collectClientCallExpressions(node: ts.SourceFile): ts.CallExpression[] {
  const clientExpressions: ts.CallExpression[] = []

  // recurse through all child nodes looking for `Client` call expressions
  function collect(node: ts.Node) {
    if (ts.isCallExpression(node)) {
      // look for client identifier
      if (isClientExpression(node)) {
        clientExpressions.push(node)
      }
    }

    ts.forEachChild(node, collect)
  }

  ts.forEachChild(node, collect)

  return clientExpressions
}

function fixBodyProp(sourceFile: ts.SourceFile, node: ts.Node) {
  if (ts.isObjectLiteralExpression(node)) {
    // @ts-expect-error need to cast `prop` to a more specific type
    const prop = node.properties.find(prop => prop.name.escapedText === 'body')
    if (prop != null) {
      console.log('// needs fix:')
      console.log(sourceFile?.text.slice(node.pos, node.end))

      // TODO: fix { body: value }
      // TODO: fix { body: { ... } }
      // TODO: fix { body }
    }
  } else if (ts.isIdentifier(node)) {
    // @ts-expect-error
    if (node.flowNode.antecedent?.node != null) {
      // @ts-expect-error
      fixBodyProp(sourceFile, node.flowNode.antecedent.node)
    } else {
      // console.log('uh oh')
      // console.log(sourceFile?.text.slice(node.pos, node.end))
    }
  } else {
    // @ts-expect-error
    if (node.flowNode?.antecedent?.node != null) {
      // console.log('two')
      // @ts-expect-error
      fixBodyProp(sourceFile, node.flowNode.antecedent.node)
    } else {
      // console.log('something else')
      // console.log(node.kind)
      // console.log(sourceFile?.text.slice(node.pos, node.end))
    }
  }
  return false
}

function lookForBodyProp(sourceFile: ts.SourceFile, node: ts.CallExpression) {
  if (node.arguments.length === 0) return
  const first = node.arguments[0]
  fixBodyProp(sourceFile, first)
}

// build TS project from provided file names
const args = minimist(process.argv.slice(2))
const cwd = process.cwd()
const files = args._.map(file => path.join(cwd, file))
const program = ts.createProgram(files, {})
const checker = program.getTypeChecker()

let processed = 0
program.getSourceFiles().forEach(sourceFile => {
  if (program.isSourceFileFromExternalLibrary(sourceFile)) return
  const { fileName } = sourceFile

  try {
    // get all `Client` call expressions
    const exprs = collectClientCallExpressions(sourceFile)
    if (exprs.length > 0) {
      console.log(`found ${exprs.length} Client expressions in ${fileName}`)
    }
    // for each call expression, get the first function argument, determine if it's an object and whether it has a `body` key
    exprs.forEach(expr => lookForBodyProp(sourceFile, expr))
  } catch (e) {
    // continue
    console.error(`Could not process ${fileName}: ${e}`)
  }
  processed++
})
console.log(`Done scanning ${processed} files`)
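The rewrite this codemod is driving toward (the actual fix is still marked TODO in `fixBodyProp`) hoists the contents of `body` into the top level of the request object; a hypothetical before/after:

[source,ts]
----
// before: 8.x style, body nested in the request
await client.search({ index: 'my-index', body: { query: { match_all: {} } } })

// after: the keys of `body` move to the top level of the request
await client.search({ index: 'my-index', query: { match_all: {} } })
----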
@@ -91,7 +91,6 @@ ${source.trim()}
}

const options = minimist(process.argv.slice(2), {
- boolean: ['debug'],
  string: ['version'],
  default: {
    version: 'master'
@@ -103,7 +102,7 @@ generate(options.version)
  .catch(err => log.fail(err.message))
  .finally(() => {
    const keys = Object.keys(failures)
-   if (keys.length > 0 && options.debug) {
+   if (keys.length > 0) {
      let message = 'Some examples failed to generate:\n\n'
      for (const key of keys) {
        message += `${key}: ${failures[key]}\n`
@@ -1,143 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

const { join } = require('path')
const { readdirSync, writeFileSync, readFileSync } = require('fs')
const minimist = require('minimist')
const ora = require('ora')
const rimraf = require('rimraf')
const standard = require('standard')
const downloadArtifacts = require('./download-artifacts')
const {
  generate,
  genFactory,
  generateDocs,
  generateRequestTypes
} = require('./utils')

start(minimist(process.argv.slice(2), {
  string: ['version', 'hash']
}))

function start (opts) {
  if (opts.version == null) {
    console.error('Missing version parameter')
    process.exit(1)
  }

  const packageFolder = join(__dirname, '..', 'api')
  const apiOutputFolder = join(packageFolder, 'api')
  const mainOutputFile = join(packageFolder, 'index.js')
  const docOutputFile = join(__dirname, '..', 'docs', 'reference.asciidoc')
  const typeDefFile = join(__dirname, '..', 'index.d.ts')
  const requestParamsOutputFile = join(packageFolder, 'requestParams.d.ts')

  let log
  downloadArtifacts({ version: opts.version, hash: opts.hash })
    .then(onArtifactsDownloaded)
    .catch(err => {
      console.log(err)
      process.exit(1)
    })

  function onArtifactsDownloaded () {
    log = ora('Generating APIs').start()

    log.text = 'Cleaning API folder...'
    rimraf.sync(join(apiOutputFolder, '*.js'))

    const allSpec = readdirSync(downloadArtifacts.locations.specFolder)
      .filter(file => file !== '_common.json')
      .filter(file => !file.includes('deprecated'))
      .sort()
      .map(file => require(join(downloadArtifacts.locations.specFolder, file)))

    const namespaces = namespacify(readdirSync(downloadArtifacts.locations.specFolder))
    for (const namespace in namespaces) {
      if (namespace === '_common') continue
      const code = generate(namespace, namespaces[namespace], downloadArtifacts.locations.specFolder, opts.version)
      const filePath = join(apiOutputFolder, `${namespace}.js`)
      writeFileSync(filePath, code, { encoding: 'utf8' })
    }

    writeFileSync(
      requestParamsOutputFile,
      generateRequestTypes(opts.version, allSpec),
      { encoding: 'utf8' }
    )

    const { fn: factory, types } = genFactory(apiOutputFolder, downloadArtifacts.locations.specFolder, namespaces)
    writeFileSync(
      mainOutputFile,
      factory,
      { encoding: 'utf8' }
    )

    const oldTypeDefString = readFileSync(typeDefFile, 'utf8')
    const start = oldTypeDefString.indexOf('/* GENERATED */')
    const end = oldTypeDefString.indexOf('/* /GENERATED */')
    const newTypeDefString = oldTypeDefString.slice(0, start + 15) + '\n' + types + '\n ' + oldTypeDefString.slice(end)
    writeFileSync(
      typeDefFile,
      newTypeDefString,
      { encoding: 'utf8' }
    )

    lintFiles(log, () => {
      log.text = 'Generating documentation'
      writeFileSync(
        docOutputFile,
        generateDocs(require(join(downloadArtifacts.locations.specFolder, '_common.json')), allSpec),
        { encoding: 'utf8' }
      )

      log.succeed('Done!')
    })
  }

  function lintFiles (log, cb) {
    log.text = 'Linting...'
    const files = [join(packageFolder, '*.js'), join(apiOutputFolder, '*.js')]
    standard.lintFiles(files, { fix: true }, err => {
      if (err) {
        return log.fail(err.message)
      }
      cb()
    })
  }

  function namespacify (apis) {
    return apis
      .map(api => api.slice(0, -5))
      .filter(api => api !== '_common')
      .filter(api => !api.includes('deprecated'))
      .reduce((acc, val) => {
        if (val.includes('.')) {
          val = val.split('.')
          acc[val[0]] = acc[val[0]] || []
          acc[val[0]].push(val[1])
        } else {
          acc[val] = []
        }
        return acc
      }, {})
  }
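
  // Illustrative only: given hypothetical spec filenames,
  // namespacify(['search.json', 'indices.create.json', 'indices.delete.json'])
  // would return { search: [], indices: ['create', 'delete'] }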
}
@@ -1,8 +0,0 @@
#!/bin/bash

exec docker run \
  --rm \
  -e ELASTICSEARCH_URL="http://elasticsearch:9200" \
  -p 5601:5601 \
  --network=elastic \
  docker.elastic.co/kibana/kibana:7.0.0-beta1
@@ -1,139 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

const { accessSync, mkdirSync } = require('fs')
const { join } = require('path')
const Git = require('simple-git')

const esRepo = 'https://github.com/elastic/elasticsearch.git'
const esFolder = join(__dirname, '..', '..', 'elasticsearch')
const apiFolder = join(esFolder, 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'api')
const xPackFolder = join(esFolder, 'x-pack', 'plugin', 'src', 'test', 'resources', 'rest-api-spec', 'api')

function cloneAndCheckout (opts, callback) {
  const { log, tag, branch } = opts
  withTag(tag, callback)

  /**
   * Sets the elasticsearch repository to the given tag.
   * If the repository is not present in `esFolder` it will
   * clone the repository and then check out the tag.
   * If the repository is already present but it cannot check out
   * the given tag, it will perform a pull and then try again.
   * @param {string} tag
   * @param {function} callback
   */
  function withTag (tag, callback) {
    let fresh = false
    let retry = 0

    if (!pathExist(esFolder)) {
      if (!createFolder(esFolder)) {
        log.fail('Failed folder creation')
        return
      }
      fresh = true
    }

    const git = Git(esFolder)

    if (fresh) {
      clone(checkout)
    } else if (opts.branch) {
      checkout(true)
    } else {
      checkout()
    }

    function checkout (alsoPull = false) {
      if (branch) {
        log.text = `Checking out branch '${branch}'`
      } else {
        log.text = `Checking out tag '${tag}'`
      }
      git.checkout(branch || tag, err => {
        if (err) {
          if (retry++ > 0) {
            callback(new Error(`Cannot checkout tag '${tag}'`), { apiFolder, xPackFolder })
            return
          }
          return pull(checkout)
        }
        if (alsoPull) {
          return pull(checkout)
        }
        callback(null, { apiFolder, xPackFolder })
      })
    }

    function pull (cb) {
      log.text = 'Pulling elasticsearch repository...'
      git.pull(err => {
        if (err) {
          callback(err, { apiFolder, xPackFolder })
          return
        }
        cb()
      })
    }

    function clone (cb) {
      log.text = 'Cloning elasticsearch repository...'
      git.clone(esRepo, esFolder, err => {
        if (err) {
          callback(err, { apiFolder, xPackFolder })
          return
        }
        cb()
      })
    }
  }

  /**
   * Checks if the given path exists
   * @param {string} path
   * @returns {boolean} true if exists, false if not
   */
  function pathExist (path) {
    try {
      accessSync(path)
      return true
    } catch (err) {
      return false
    }
  }

  /**
   * Creates the given folder
   * @param {string} name
   * @returns {boolean} true on success, false on failure
   */
  function createFolder (name) {
    try {
      mkdirSync(name)
      return true
    } catch (err) {
      return false
    }
  }
}

module.exports = cloneAndCheckout
@@ -1,553 +0,0 @@
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/* eslint camelcase: 0 */

'use strict'

const { join } = require('path')
const dedent = require('dedent')
const allowedMethods = {
  noBody: ['GET', 'HEAD', 'DELETE'],
  body: ['POST', 'PUT', 'DELETE']
}

// if a parameter is deprecated in a minor release
// we should be able to support it until the next major
const deprecatedParameters = require('./patch.json')
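
// The contents of patch.json are not shown in this diff; from the lookup
// `deprecatedParameters[release][key]` in generateSingleApi below, a
// hypothetical entry maps a major release to { currentParamName: deprecatedAliasStillAccepted }, e.g.
// { "7": { "new_param_name": "old_param_name" } }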

// list of APIs that do not need any kind of validation
// because of how the url is built or the `type` handling in ES7
const noPathValidation = [
  'create',
  'exists',
  'explain',
  'get',
  'get_source',
  'index',
  'indices.get_alias',
  'indices.exists_alias',
  'indices.get_field_mapping',
  'indices.get_mapping',
  'indices.get_settings',
  'indices.put_mapping',
  'indices.stats',
  'delete',
  'nodes.info',
  'nodes.stats',
  'nodes.usage',
  'tasks.cancel',
  'termvectors',
  'update'
]

function generateNamespace (namespace, nested, specFolder, version) {
  const common = require(join(specFolder, '_common.json'))
  let code = dedent`
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

'use strict'

/* eslint camelcase: 0 */
/* eslint no-unused-vars: 0 */

const { handleError, snakeCaseKeys, normalizeArguments, kConfigurationError } = require('../utils')
`
  if (nested.length > 0) {
    let getters = ''
    for (const n of nested) {
      if (n.includes('_')) {
        const nameSnaked = n
          .replace(/\.([a-z])/g, k => k[1].toUpperCase())
          .replace(/_([a-z])/g, k => k[1].toUpperCase())
        getters += `${n}: { get () { return this.${nameSnaked} } },\n`
      }
    }
    const api = generateMultiApi(version, namespace, nested, common, specFolder)
    if (getters.length > 0) {
      getters = `Object.defineProperties(${api.namespace}Api.prototype, {\n${getters}})`
    }

    code += `
const acceptedQuerystring = ${JSON.stringify(api.acceptedQuerystring)}
const snakeCase = ${JSON.stringify(api.snakeCase)}

function ${api.namespace}Api (transport, ConfigurationError) {
  this.transport = transport
  this[kConfigurationError] = ConfigurationError
}

${api.code}

${getters}

module.exports = ${api.namespace}Api
`
  } else {
    const spec = require(join(specFolder, `${namespace}.json`))
    const api = generateSingleApi(version, spec, common)
    code += `
const acceptedQuerystring = ${JSON.stringify(api.acceptedQuerystring)}
const snakeCase = ${JSON.stringify(api.snakeCase)}

${api.code}

module.exports = ${api.name}Api
`
  }
  return code
}

function generateMultiApi (version, namespace, nested, common, specFolder) {
  const namespaceSnaked = namespace
    .replace(/\.([a-z])/g, k => k[1].toUpperCase())
    .replace(/_([a-z])/g, k => k[1].toUpperCase())
  let code = ''
  const snakeCase = {}
  const acceptedQuerystring = []
  for (const n of nested) {
    const nameSnaked = n
      .replace(/\.([a-z])/g, k => k[1].toUpperCase())
      .replace(/_([a-z])/g, k => k[1].toUpperCase())
    const spec = require(join(specFolder, `${namespace}.${n}.json`))
    const api = generateSingleApi(version, spec, common)
    code += `${Uppercase(namespaceSnaked)}Api.prototype.${nameSnaked} = ${api.code}\n\n`
    Object.assign(snakeCase, api.snakeCase)
    for (const q of api.acceptedQuerystring) {
      if (!acceptedQuerystring.includes(q)) {
        acceptedQuerystring.push(q)
      }
    }
  }
  return { code, snakeCase, acceptedQuerystring, namespace: Uppercase(namespaceSnaked) }
}

function generateSingleApi (version, spec, common) {
  const release = version.charAt(0)
  const api = Object.keys(spec)[0]
  const name = api
    .replace(/\.([a-z])/g, k => k[1].toUpperCase())
    .replace(/_([a-z])/g, k => k[1].toUpperCase())

  const { paths } = spec[api].url
  const { params } = spec[api]
  const acceptedQuerystring = []
  const required = []

  const methods = paths.reduce((acc, val) => {
    for (const method of val.methods) {
      if (!acc.includes(method)) acc.push(method)
    }
    return acc
  }, [])
  const parts = paths.reduce((acc, val) => {
    if (!val.parts) return acc
    for (const part of Object.keys(val.parts)) {
      if (!acc.includes(part)) acc.push(part)
    }
    return acc
  }, [])

  // get the required parts from the url
  // if the url has at least one static path,
  // then there are no required parts of the url
  let allParts = []
  for (const path of paths) {
    if (path.parts) {
      allParts.push(Object.keys(path.parts))
    } else {
      allParts = []
      break
    }
  }
  if (allParts.length > 0) {
    intersect(...allParts).forEach(r => required.push(r))
  }

  for (const key in params) {
    if (params[key].required) {
      required.push(key)
    }

    acceptedQuerystring.push(key)
    if (deprecatedParameters[release] && deprecatedParameters[release][key]) {
      acceptedQuerystring.push(deprecatedParameters[release][key])
    }
  }

  for (const key in spec[api]) {
    const k = spec[api][key]
    if (k && k.required) {
      required.push(key)
    }
  }
  if (common && common.params) {
    for (const key in common.params) {
      acceptedQuerystring.push(key)
    }
  }

  const code = `
function ${name}Api (params, options, callback) {
  ;[params, options, callback] = normalizeArguments(params, options, callback)

  ${genRequiredChecks()}

  ${genUrlValidation(paths, api)}

  let { ${genQueryDenylist(false)}, ...querystring } = params
  querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)

  let path = ''
  ${buildPath()}

  // build request object
  const request = {
    method,
    path,
    ${genBody(api, methods, spec[api].body, spec)}
    querystring
  }

  return this.transport.request(request, options, callback)
}
`.trim() // always call trim to avoid newlines

  return {
    name,
    code,
    acceptedQuerystring: acceptedQuerystring,
    snakeCase: genSnakeCaseMap(),
    documentation: generateDocumentation(spec[api], api)
  }

  function genRequiredChecks () {
    const code = required
      .map(_genRequiredCheck)
      .concat(_noBody())
      .filter(Boolean)

    if (code.length) {
      code.unshift('// check required parameters')
    }

    return code.join('\n ')

    function _genRequiredCheck (param) {
      const camelCased = param[0] === '_'
        ? '_' + param.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
        : param.replace(/_([a-z])/g, k => k[1].toUpperCase())

      if (param === camelCased) {
        const check = `
        if (params['${param}'] == null) {
          const err = new this[kConfigurationError]('Missing required parameter: ${param}')
          return handleError(err, callback)
        }
        `
        return check.trim()
      } else {
        const check = `
        if (params['${param}'] == null && params['${camelCased}'] == null) {
          const err = new this[kConfigurationError]('Missing required parameter: ${param} or ${camelCased}')
          return handleError(err, callback)
        }
        `
        return check.trim()
      }
    }

    function _noBody () {
      const check = `
      if (params.body != null) {
        const err = new this[kConfigurationError]('This API does not require a body')
        return handleError(err, callback)
      }
      `
      return spec[api].body === null ? check.trim() : ''
    }
  }

  function genSnakeCaseMap () {
    const toCamelCase = str => {
      return str[0] === '_'
        ? '_' + str.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
        : str.replace(/_([a-z])/g, k => k[1].toUpperCase())
    }

    return acceptedQuerystring.reduce((acc, val, index) => {
      if (toCamelCase(val) !== val) {
        acc[toCamelCase(val)] = val
      }
      return acc
    }, {})
  }

  function genQueryDenylist (addQuotes = true) {
    const toCamelCase = str => {
      return str[0] === '_'
        ? '_' + str.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
        : str.replace(/_([a-z])/g, k => k[1].toUpperCase())
    }

    const denylist = ['method', 'body']
    parts.forEach(p => {
      const camelStr = toCamelCase(p)
      if (camelStr !== p) denylist.push(`${camelStr}`)
      denylist.push(`${p}`)
    })
    return addQuotes ? denylist.map(q => `'${q}'`) : denylist
  }

  function buildPath () {
    const toCamelCase = str => {
      return str[0] === '_'
        ? '_' + str.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
        : str.replace(/_([a-z])/g, k => k[1].toUpperCase())
    }

    const genAccessKey = str => {
      const camelStr = toCamelCase(str)
      return camelStr === str
        ? str
        : `${str} || ${camelStr}`
    }

    const genCheck = path => {
      return path
        .split('/')
        .filter(Boolean)
        .map(p => p.startsWith('{') ? `(${genAccessKey(p.slice(1, -1))}) != null` : false)
        .filter(Boolean)
        .join(' && ')
    }

    const genPath = path => {
      path = path
        .split('/')
        .filter(Boolean)
        .map(p => p.startsWith('{') ? `encodeURIComponent(${genAccessKey(p.slice(1, -1))})` : `'${p}'`)
        .join(' + \'/\' + ')
      return path.length > 0 ? ('\'/\' + ' + path) : '\'/\''
    }

    let hasStaticPath = false
    let sortedPaths = paths
      // some legacy APIs have multiple static paths
      // this filter removes them
      .filter(p => {
        if (p.path.includes('{')) return true
        if (hasStaticPath === false && p.deprecated == null) {
          hasStaticPath = true
          return true
        }
        return false
      })
      // sort by number of parameters (desc)
      .sort((a, b) => Object.keys(b.parts || {}).length - Object.keys(a.parts || {}).length)

    const allDeprecated = paths.filter(path => path.deprecated != null)
    if (allDeprecated.length === paths.length) sortedPaths = [paths[0]]

    let code = ''
    for (let i = 0; i < sortedPaths.length; i++) {
      const { path, methods } = sortedPaths[i]
      if (sortedPaths.length === 1) {
        code += `if (method == null) method = ${generatePickMethod(methods)}
        path = ${genPath(path)}
        `
      } else if (i === 0) {
        code += `if (${genCheck(path)}) {
          if (method == null) method = ${generatePickMethod(methods)}
          path = ${genPath(path)}
        }
        `
      } else if (i === sortedPaths.length - 1) {
        code += ` else {
          if (method == null) method = ${generatePickMethod(methods)}
          path = ${genPath(path)}
        }
        `
      } else {
        code += ` else if (${genCheck(path)}) {
          if (method == null) method = ${generatePickMethod(methods)}
          path = ${genPath(path)}
        }
        `
      }
    }

    return code
  }
}
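
// Illustrative only: for a hypothetical single-path API such as `ping`
// (HEAD '/', no body, no url parts), the template above expands to roughly:
//
//   function pingApi (params, options, callback) {
//     ;[params, options, callback] = normalizeArguments(params, options, callback)
//
//     let { method, body, ...querystring } = params
//     querystring = snakeCaseKeys(acceptedQuerystring, snakeCase, querystring)
//
//     let path = ''
//     if (method == null) method = 'HEAD'
//     path = '/'
//
//     const request = { method, path, body: null, querystring }
//
//     return this.transport.request(request, options, callback)
//   }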

function generatePickMethod (methods) {
  if (methods.length === 1) {
    return `'${methods[0]}'`
  }
  const bodyMethod = getBodyMethod(methods)
  const noBodyMethod = getNoBodyMethod(methods)
  if (bodyMethod && noBodyMethod) {
    return `body == null ? '${noBodyMethod}' : '${bodyMethod}'`
  } else if (bodyMethod) {
    return `'${bodyMethod}'`
  } else {
    return `'${noBodyMethod}'`
  }
}

function genBody (api, methods, body, spec) {
  const bodyMethod = getBodyMethod(methods)
  const { content_type } = spec[api].headers
  if (content_type && content_type.includes('application/x-ndjson')) {
    return 'bulkBody: body,'
  }
  if (body === null && bodyMethod) {
    return 'body: \'\','
  } else if (bodyMethod) {
    return 'body: body || \'\','
  } else {
    return 'body: null,'
  }
}

function getBodyMethod (methods) {
  const m = methods.filter(m => ~allowedMethods.body.indexOf(m))
  if (m.length) return m[0]
  return null
}

function getNoBodyMethod (methods) {
  const m = methods.filter(m => ~allowedMethods.noBody.indexOf(m))
  if (m.length) return m[0]
  return null
}

function genUrlValidation (paths, api) {
  // this API does not need url validation
  if (!needsPathValidation(api)) return ''
  // gets only the dynamic components of the url in an array,
  // then reverses it. A parameter always requires what is
  // to its right in the array.
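  // For example, with a hypothetical url '/{index}/{type}/{id}' the reversed
  // chunks are ['id', 'type', 'index']: passing `id` requires `type` and
  // `index`, passing `type` requires `index`, and `index` alone needs no
  // check, which is why the last chunk is skipped below.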
  const chunks = paths
    .sort((a, b) => Object.keys(a.parts || {}).length > Object.keys(b.parts || {}).length ? -1 : 1)
    .slice(0, 1)
    .reduce((acc, val) => val.path, '')
    // .reduce((a, b) => a.path.split('/').length > b.path.split('/').length ? a.path : b.path)
    .split('/')
    .filter(s => s.startsWith('{'))
    .map(s => s.slice(1, -1))
    .reverse()

  let code = ''

  const len = chunks.length
  chunks.forEach((chunk, index) => {
    if (index === len - 1) return
    const params = []
    let camelCased = chunk[0] === '_'
      ? '_' + chunk.slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
      : chunk.replace(/_([a-z])/g, k => k[1].toUpperCase())

    if (chunk === camelCased) {
      code += `${index ? '} else ' : ''}if (params['${chunk}'] != null && (`
    } else {
      code += `${index ? '} else ' : ''}if ((params['${chunk}'] != null || params['${camelCased}'] != null) && (`
    }
    for (let i = index + 1; i < len; i++) {
      params.push(chunks[i])
      // url parts can be declared in camelCase fashion
      camelCased = chunks[i][0] === '_'
        ? '_' + chunks[i].slice(1).replace(/_([a-z])/g, k => k[1].toUpperCase())
        : chunks[i].replace(/_([a-z])/g, k => k[1].toUpperCase())

      if (chunks[i] === camelCased) {
        code += `params['${chunks[i]}'] == null${i === len - 1 ? '' : ' || '}`
      } else {
        code += `(params['${chunks[i]}'] == null && params['${camelCased}'] == null)${i === len - 1 ? '' : ' || '}`
      }
    }
    code += `)) {
      const err = new this[kConfigurationError]('Missing required parameter of the url: ${params.join(', ')}')
      return handleError(err, callback)
    `
  })

  if (chunks.length > 1) {
    code += '\n}'
  }

  if (code.length) {
    code = '// check required url components\n' + code
  }

  return code.trim()
}

function generateDocumentation ({ documentation }, op) {
  // we use `replace(/\u00A0/g, ' ')` to remove non-breaking spaces,
  // because some parts of the description fields use them

  if (documentation == null) return ''

  let doc = '/**\n'
  doc += ` * Perform a ${op} request\n`
  if (documentation.description) {
    doc += ` * ${documentation.description.replace(/\u00A0/g, ' ')}\n`
  }
  if (documentation.url) {
    doc += ` * ${documentation.url}\n`
  }
  doc += ' */'

  return doc
}

function needsPathValidation (api) {
  return noPathValidation.indexOf(api) === -1
}

function intersect (first, ...rest) {
  return rest.reduce((accum, current) => {
    return accum.filter(x => current.indexOf(x) !== -1)
  }, first)
}

function Uppercase (str) {
  return str[0].toUpperCase() + str.slice(1)
}

module.exports = generateNamespace
Some files were not shown because too many files have changed in this diff