Compare commits: v9.0.0-alp ... 8.16
42 commits
| SHA1 |
|---|
| 39cdb17804 |
| ce4acd37ff |
| 655d62b7b5 |
| 7e7176fbf9 |
| e651cebb28 |
| 120845a662 |
| a00307827a |
| 25e8e84fe5 |
| 279c29d8f9 |
| 07f7ffea76 |
| 3d3c72dc40 |
| 3627a4c56b |
| fecda564da |
| f9a5a18a71 |
| 6fb0f426c3 |
| edfef68b01 |
| e27e096c5f |
| 8476ce6071 |
| 419271d82d |
| 1ef1754623 |
| be2fe317f2 |
| 1d2d934b50 |
| d0f5ada03d |
| 47de886973 |
| 1e103baec1 |
| 7d5f622506 |
| 8377b58af3 |
| c150efbd21 |
| e7663aabde |
| c9615dc0ef |
| bb5fb24d73 |
| 38358e20ab |
| 9479d82644 |
| 18df52feb4 |
| f72f9e9a5a |
| c4151ceb35 |
| f3aedc7ad0 |
| 586c42161d |
| 9947b0e365 |
| 52b7264b45 |
| fceebae8ae |
| e45ed28c05 |
@@ -25,7 +25,7 @@ steps:
provider: "gcp"
image: family/core-ubuntu-2204
plugins:
- junit-annotate#v2.5.0:
- junit-annotate#v2.4.1:
artifacts: "junit-output/junit-*.xml"
job-uuid-file-pattern: "junit-(.*).xml"
fail-build-on-error: true
2 .github/make.sh vendored
@@ -176,7 +176,7 @@ else
--rm \
$product \
/bin/bash -c "cd /usr/src && \
git clone https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
git clone --branch $GENERATOR_BRANCH https://$CLIENTS_GITHUB_TOKEN@github.com/elastic/elastic-client-generator-js.git && \
mkdir -p /usr/src/elastic-client-generator-js/output && \
cd /usr/src/elasticsearch-js && \
node .buildkite/make.mjs --task $TASK ${TASK_ARGS[*]}"
26 .github/stale.yml vendored Normal file
@@ -0,0 +1,26 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 15

# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7

# Issues with these labels will never be considered stale
exemptLabels:
  - "discussion"
  - "feature request"
  - "bug"
  - "todo"
  - "good first issue"

# Label to use when marking an issue as stale
staleLabel: stale

# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: |
  We understand that this might be important for you, but this issue has been automatically marked as stale because it has not had recent activity either from our end or yours.
  It will be closed if no further activity occurs, please write a comment if you would like to keep this going.

  Note: in the past months we have built a new client, that has just landed in master. If you want to open an issue or a pr for the legacy client, you should do that in https://github.com/elastic/elasticsearch-js-legacy

# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
26 .github/workflows/nodejs.yml vendored
@@ -11,10 +11,10 @@ jobs:
outputs:
src-only: "${{ steps.changes.outputs.src-only }}"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: dorny/paths-filter/@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
- uses: dorny/paths-filter/@v3.0.2
id: changes
with:
filters: |
@@ -36,12 +36,12 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}

@@ -66,12 +66,12 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Use Node.js
uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
uses: actions/setup-node@v4
with:
node-version: 22.x

@@ -96,12 +96,12 @@ jobs:
os: [ubuntu-latest, windows-latest, macOS-latest]

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
persist-credentials: false

- name: Use Bun
uses: oven-sh/setup-bun@4bc047ad259df6fc24a6c9b0f9a0cb08cf17fbe5 # v2
uses: oven-sh/setup-bun@v2

- name: Install
run: |
@@ -118,3 +118,13 @@ jobs:
- name: ECMAScript module test
run: |
bun run test:esm

auto-approve:
name: Auto-approve
needs: [test, license]
runs-on: ubuntu-latest
permissions:
pull-requests: write
if: github.actor == 'elasticmachine'
steps:
- uses: hmarr/auto-approve-action@v4
15 .github/workflows/npm-publish.yml vendored
@@ -12,29 +12,26 @@ jobs:
contents: write
id-token: write
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
persist-credentials: false
ref: ${{ github.event.inputs.branch }}
- uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4
- uses: actions/setup-node@v4
with:
node-version: "22.x"
registry-url: "https://registry.npmjs.org"
- run: npm install -g npm
- run: npm install
- run: npm test
- run: npm publish --provenance --access public --tag alpha
- run: npm publish --provenance --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Publish version on GitHub
run: |
- run: |
version=$(jq -r .version package.json)
gh release create \
-n "This is a 9.0.0 pre-release alpha. Changes may not be stable." \
--latest=false \
--prerelease \
-n "[Changelog](https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/$BRANCH_NAME/changelog-client.html)" \
--target "$BRANCH_NAME" \
--title "v$version" \
-t "v$version" \
"v$version"
env:
BRANCH_NAME: ${{ github.event.inputs.branch }}
6 .github/workflows/serverless-patch.yml vendored
@@ -26,14 +26,14 @@ jobs:
)
)
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
persist-credentials: false
repository: elastic/elasticsearch-js
ref: main
path: stack
fetch-depth: 0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- uses: actions/checkout@v4
with:
persist-credentials: false
repository: elastic/elasticsearch-serverless-js
@@ -42,7 +42,7 @@ jobs:
- name: Apply patch from stack to serverless
id: apply-patch
run: $GITHUB_WORKSPACE/stack/.github/workflows/serverless-patch.sh
- uses: peter-evans/create-pull-request@5e914681df9dc83aa4e4905692ca88beb2f9e91f # v7
- uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GH_TOKEN }}
path: serverless
12 .github/workflows/stale.yml vendored
@@ -1,21 +1,21 @@
---
name: "Close stale issues and PRs"
name: 'Close stale issues and PRs'
on:
schedule:
- cron: "30 1 * * *"
- cron: '30 1 * * *'

jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9
- uses: actions/stale@v8
with:
stale-issue-label: stale
stale-pr-label: stale
days-before-stale: 90
days-before-close: 14
exempt-issue-labels: "good first issue,tracking"
exempt-issue-labels: 'good first issue'
close-issue-label: closed-stale
close-pr-label: closed-stale
stale-issue-message: "This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
stale-pr-message: "This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days."
stale-issue-message: 'This issue is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
stale-pr-message: 'This pull request is stale because it has been open 90 days with no activity. Remove the `stale` label, or leave a comment, or this will be closed in 14 days.'
@@ -42,9 +42,6 @@ spec:
main:
branch: "main"
cronline: "@daily"
8_x:
branch: "8.x"
cronline: "@daily"
8_14:
branch: "8.16"
branch: "8.14"
cronline: "@daily"
@@ -2,15 +2,31 @@
== Release notes

[discrete]
=== 9.0.0
=== 8.16.4

[discrete]
==== Breaking changes
==== Fixes

[discrete]
===== Drop support for deprecated `body` parameter
===== Improved support for Elasticsearch `v8.16`

In 8.0, the top-level `body` parameter that was available on all API functions <<remove-body-key,was deprecated>>. In 9.0 this property is completely removed.
Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.
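As a quick illustration of the migration path described above (a minimal sketch; the index name and query are placeholders, not taken from the changelog):

[source, js]
----
// Pre-9.0 style (deprecated since 8.0): request options nested under `body`
// const response = await client.search({
//   index: "my-index",
//   body: { query: { match_all: {} } },
// });

// 9.0 style: pass the request options at the top level
const response = await client.search({
  index: "my-index",
  query: { match_all: {} },
});
console.log(response);
----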

[discrete]
===== Report correct transport connection type in telemetry

The client's telemetry reporting mechanism was incorrectly reporting all traffic as using `HttpConnection` when the default is `UndiciConnection`. https://github.com/elastic/elasticsearch-js/issues/2324[#2324]
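For reference, a minimal sketch of the two connection classes mentioned above; the node URL is a placeholder, and `HttpConnection` only needs to be passed when opting out of the default `UndiciConnection`:

[source, js]
----
const { Client, HttpConnection } = require("@elastic/elasticsearch");

// Default transport connection: UndiciConnection (what telemetry should report)
const client = new Client({ node: "http://localhost:9200" });

// Explicit opt-in to the Node.js http-based connection
const legacyClient = new Client({
  node: "http://localhost:9200",
  Connection: HttpConnection,
});
----
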
[discrete]
=== 8.16.3

[discrete]
==== Fixes

[discrete]
===== Improved support for Elasticsearch `v8.16`

Updated TypeScript types based on fixes and improvements to the Elasticsearch specification.

[discrete]
=== 8.16.2
@@ -656,7 +672,6 @@ ac.abort()
----

[discrete]
[[remove-body-key]]
===== Remove the body key from the request

*Breaking: Yes* | *Migration effort: Small*
11 docs/doc_examples/00ad41bde67beac991534ae0e04b1296.asciidoc Normal file
@@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.getDataStream({
  name: "my-data-stream",
  filter_path: "data_streams.indices.index_name",
});
console.log(response);
----
10 docs/doc_examples/0722b302b2b3275a988d858044f99d5d.asciidoc Normal file
@@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.getMapping({
  index: "kibana_sample_data_ecommerce",
});
console.log(response);
----
@@ -11,6 +11,8 @@ const response = await client.indices.putSettings({
"index.indexing.slowlog.threshold.index.debug": "2s",
"index.indexing.slowlog.threshold.index.trace": "500ms",
"index.indexing.slowlog.source": "1000",
"index.indexing.slowlog.reformat": true,
"index.indexing.slowlog.include.user": true,
},
});
console.log(response);
42 docs/doc_examples/082e78c7a2061a7c4a52b494e5ede0e8.asciidoc Normal file
@@ -0,0 +1,42 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-rank-vectors-bit",
  mappings: {
    properties: {
      my_vector: {
        type: "rank_vectors",
        element_type: "bit",
      },
    },
  },
});
console.log(response);

const response1 = await client.bulk({
  index: "my-rank-vectors-bit",
  refresh: "true",
  operations: [
    {
      index: {
        _id: "1",
      },
    },
    {
      my_vector: [127, -127, 0, 1, 42],
    },
    {
      index: {
        _id: "2",
      },
    },
    {
      my_vector: "8100012a7f",
    },
  ],
});
console.log(response1);
----
@@ -3,17 +3,11 @@

[source, js]
----
const response = await client.transport.request({
method: "POST",
path: "/_query/async",
querystring: {
format: "json",
},
body: {
query:
"\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
include_ccs_metadata: true,
},
const response = await client.esql.asyncQuery({
format: "json",
query:
"\n FROM my-index-000001,cluster_one:my-index-000001,cluster_two:my-index*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
include_ccs_metadata: true,
});
console.log(response);
----
@@ -10,7 +10,7 @@ const response = await client.ingest.putPipeline({
{
attachment: {
field: "data",
remove_binary: false,
remove_binary: true,
},
},
],
@@ -3,23 +3,20 @@

[source, js]
----
const response = await client.transport.request({
method: "POST",
path: "/_application/search_application/my-app/_render_query",
body: {
params: {
query_string: "my first query",
text_fields: [
{
name: "title",
boost: 5,
},
{
name: "description",
boost: 1,
},
],
},
const response = await client.searchApplication.renderQuery({
name: "my-app",
params: {
query_string: "my first query",
text_fields: [
{
name: "title",
boost: 5,
},
{
name: "description",
boost: 1,
},
],
},
});
console.log(response);
@@ -4,9 +4,11 @@
[source, js]
----
const response = await client.indices.putSettings({
index: "my-index-000001",
index: ".reindexed-v9-ml-anomalies-custom-example",
settings: {
"index.blocks.read_only_allow_delete": null,
index: {
number_of_replicas: "<original_number_of_replicas>",
},
},
});
console.log(response);
19 docs/doc_examples/120fcf9f55128d6a81d5e87a9c235bbd.asciidoc Normal file
@@ -0,0 +1,19 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.chatCompletionUnified({
  inference_id: "openai-completion",
  chat_completion_request: {
    model: "gpt-4o",
    messages: [
      {
        role: "user",
        content: "What is Elastic?",
      },
    ],
  },
});
console.log(response);
----
11 docs/doc_examples/12adea5d76f73d94d80d42f53f67563f.asciidoc Normal file
@@ -0,0 +1,11 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.addBlock({
  index: ".ml-anomalies-custom-example",
  block: "read_only",
});
console.log(response);
----
25 docs/doc_examples/13d91782399ba1f291e103c18b5338cc.asciidoc Normal file
@@ -0,0 +1,25 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.createFrom({
  source: "my-index",
  dest: "my-new-index",
  create_from: {
    settings_override: {
      index: {
        number_of_shards: 5,
      },
    },
    mappings_override: {
      properties: {
        field2: {
          type: "boolean",
        },
      },
    },
  },
});
console.log(response);
----
@ -3,8 +3,7 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.inference({
|
||||
task_type: "sparse_embedding",
|
||||
const response = await client.inference.sparseEmbedding({
|
||||
inference_id: "my-elser-model",
|
||||
input:
|
||||
"The sky above the port was the color of television tuned to a dead channel.",
|
||||
|
||||
15
docs/doc_examples/141ef0ebaa3b0772892b79b9bb85efb0.asciidoc
Normal file
15
docs/doc_examples/141ef0ebaa3b0772892b79b9bb85efb0.asciidoc
Normal file
@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.update({
|
||||
inference_id: "my-inference-endpoint",
|
||||
inference_config: {
|
||||
service_settings: {
|
||||
api_key: "<API_KEY>",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/15ac33d641b376d9494075eb1f0d4066.asciidoc
Normal file
10
docs/doc_examples/15ac33d641b376d9494075eb1f0d4066.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.cancelMigrateReindex({
|
||||
index: "my-data-stream",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
12
docs/doc_examples/174b93c323aa8e9cc8ee2a3df5736810.asciidoc
Normal file
12
docs/doc_examples/174b93c323aa8e9cc8ee2a3df5736810.asciidoc
Normal file
@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.delegatePki({
|
||||
x509_certificate_chain: [
|
||||
"MIIDeDCCAmCgAwIBAgIUBzj/nGGKxP2iXawsSquHmQjCJmMwDQYJKoZIhvcNAQELBQAwUzErMCkGA1UEAxMiRWxhc3RpY3NlYXJjaCBUZXN0IEludGVybWVkaWF0ZSBDQTEWMBQGA1UECxMNRWxhc3RpY3NlYXJjaDEMMAoGA1UEChMDb3JnMB4XDTIzMDcxODE5MjkwNloXDTQzMDcxMzE5MjkwNlowSjEiMCAGA1UEAxMZRWxhc3RpY3NlYXJjaCBUZXN0IENsaWVudDEWMBQGA1UECxMNRWxhc3RpY3NlYXJjaDEMMAoGA1UEChMDb3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAllHL4pQkkfwAm/oLkxYYO+r950DEy1bjH+4viCHzNADLCTWO+lOZJVlNx7QEzJE3QGMdif9CCBBxQFMapA7oUFCLq84fPSQQu5AnvvbltVD9nwVtCs+9ZGDjMKsz98RhSLMFIkxdxi6HkQ3Lfa4ZSI4lvba4oo+T/GveazBDS+NgmKyq00EOXt3tWi1G9vEVItommzXWfv0agJWzVnLMldwkPqsw0W7zrpyT7FZS4iLbQADGceOW8fiauOGMkscu9zAnDR/SbWl/chYioQOdw6ndFLn1YIFPd37xL0WsdsldTpn0vH3YfzgLMffT/3P6YlwBegWzsx6FnM/93Ecb4wIDAQABo00wSzAJBgNVHRMEAjAAMB0GA1UdDgQWBBQKNRwjW+Ad/FN1Rpoqme/5+jrFWzAfBgNVHSMEGDAWgBRcya0c0x/PaI7MbmJVIylWgLqXNjANBgkqhkiG9w0BAQsFAAOCAQEACZ3PF7Uqu47lplXHP6YlzYL2jL0D28hpj5lGtdha4Muw1m/BjDb0Pu8l0NQ1z3AP6AVcvjNDkQq6Y5jeSz0bwQlealQpYfo7EMXjOidrft1GbqOMFmTBLpLA9SvwYGobSTXWTkJzonqVaTcf80HpMgM2uEhodwTcvz6v1WEfeT/HMjmdIsq4ImrOL9RNrcZG6nWfw0HR3JNOgrbfyEztEI471jHznZ336OEcyX7gQuvHE8tOv5+oD1d7s3Xg1yuFp+Ynh+FfOi3hPCuaHA+7F6fLmzMDLVUBAllugst1C3U+L/paD7tqIa4ka+KNPCbSfwazmJrt4XNiivPR4hwH5g==",
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,27 +3,23 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_ingest/_simulate",
|
||||
body: {
|
||||
docs: [
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "123",
|
||||
_source: {
|
||||
foo: "bar",
|
||||
},
|
||||
const response = await client.simulate.ingest({
|
||||
docs: [
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "123",
|
||||
_source: {
|
||||
foo: "bar",
|
||||
},
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "456",
|
||||
_source: {
|
||||
foo: "rab",
|
||||
},
|
||||
},
|
||||
{
|
||||
_index: "my-index",
|
||||
_id: "456",
|
||||
_source: {
|
||||
foo: "rab",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
19
docs/doc_examples/1ead35c954963e83f89872048dabdbe9.asciidoc
Normal file
19
docs/doc_examples/1ead35c954963e83f89872048dabdbe9.asciidoc
Normal file
@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.security.queryRole({
|
||||
query: {
|
||||
bool: {
|
||||
must_not: {
|
||||
term: {
|
||||
"metadata._reserved": true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
sort: ["name"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
67
docs/doc_examples/246763219ec06172f7aa57bba28d344a.asciidoc
Normal file
67
docs/doc_examples/246763219ec06172f7aa57bba28d344a.asciidoc
Normal file
@ -0,0 +1,67 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-rank-vectors-bit",
|
||||
mappings: {
|
||||
properties: {
|
||||
my_vector: {
|
||||
type: "rank_vectors",
|
||||
element_type: "bit",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.bulk({
|
||||
index: "my-rank-vectors-bit",
|
||||
refresh: "true",
|
||||
operations: [
|
||||
{
|
||||
index: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: [127, -127, 0, 1, 42],
|
||||
},
|
||||
{
|
||||
index: {
|
||||
_id: "2",
|
||||
},
|
||||
},
|
||||
{
|
||||
my_vector: "8100012a7f",
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.search({
|
||||
index: "my-rank-vectors-bit",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
[
|
||||
0.35, 0.77, 0.95, 0.15, 0.11, 0.08, 0.58, 0.06, 0.44, 0.52, 0.21,
|
||||
0.62, 0.65, 0.16, 0.64, 0.39, 0.93, 0.06, 0.93, 0.31, 0.92, 0,
|
||||
0.66, 0.86, 0.92, 0.03, 0.81, 0.31, 0.2, 0.92, 0.95, 0.64, 0.19,
|
||||
0.26, 0.77, 0.64, 0.78, 0.32, 0.97, 0.84,
|
||||
],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
11
docs/doc_examples/272e27bf1fcc4fe5dbd4092679dd0342.asciidoc
Normal file
11
docs/doc_examples/272e27bf1fcc4fe5dbd4092679dd0342.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.addBlock({
|
||||
index: ".ml-anomalies-custom-example",
|
||||
block: "write",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
15
docs/doc_examples/29aeabacb1fdf5b083d5f091b6d1bd44.asciidoc
Normal file
15
docs/doc_examples/29aeabacb1fdf5b083d5f091b6d1bd44.asciidoc
Normal file
@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.migrateReindex({
|
||||
reindex: {
|
||||
source: {
|
||||
index: "my-data-stream",
|
||||
},
|
||||
mode: "upgrade",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,14 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_security/oidc/logout",
|
||||
body: {
|
||||
token:
|
||||
"dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
|
||||
refresh_token: "vLBPvmAB6KvwvJZr27cS",
|
||||
},
|
||||
const response = await client.security.oidcLogout({
|
||||
token:
|
||||
"dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==",
|
||||
refresh_token: "vLBPvmAB6KvwvJZr27cS",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
26
docs/doc_examples/2a21674c40f9b182a8944769d20b2357.asciidoc
Normal file
26
docs/doc_examples/2a21674c40f9b182a8944769d20b2357.asciidoc
Normal file
@ -0,0 +1,26 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-rank-vectors-float",
|
||||
query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source: "maxSimDotProduct(params.query_vector, 'my_vector')",
|
||||
params: {
|
||||
query_vector: [
|
||||
[0.5, 10, 6],
|
||||
[-0.5, 10, 10],
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
35
docs/doc_examples/2a67608dadbf220a2f040f3a79d3677d.asciidoc
Normal file
35
docs/doc_examples/2a67608dadbf220a2f040f3a79d3677d.asciidoc
Normal file
@ -0,0 +1,35 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.putPipeline({
|
||||
id: "attachment",
|
||||
description: "Extract attachment information including original binary",
|
||||
processors: [
|
||||
{
|
||||
attachment: {
|
||||
field: "data",
|
||||
remove_binary: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
|
||||
const response1 = await client.index({
|
||||
index: "my-index-000001",
|
||||
id: "my_id",
|
||||
pipeline: "attachment",
|
||||
document: {
|
||||
data: "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=",
|
||||
},
|
||||
});
|
||||
console.log(response1);
|
||||
|
||||
const response2 = await client.get({
|
||||
index: "my-index-000001",
|
||||
id: "my_id",
|
||||
});
|
||||
console.log(response2);
|
||||
----
|
||||
24
docs/doc_examples/2afd49985950cbcccf727fa858d00067.asciidoc
Normal file
24
docs/doc_examples/2afd49985950cbcccf727fa858d00067.asciidoc
Normal file
@ -0,0 +1,24 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "test-index",
|
||||
query: {
|
||||
match: {
|
||||
my_field: "Which country is Paris in?",
|
||||
},
|
||||
},
|
||||
highlight: {
|
||||
fields: {
|
||||
my_field: {
|
||||
type: "semantic",
|
||||
number_of_fragments: 2,
|
||||
order: "score",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,12 +3,9 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "GET",
|
||||
path: "/_query/async/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
|
||||
querystring: {
|
||||
wait_for_completion_timeout: "30s",
|
||||
},
|
||||
const response = await client.esql.asyncQueryGet({
|
||||
id: "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
|
||||
wait_for_completion_timeout: "30s",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
23
docs/doc_examples/2f72a63c73dd672ac2dc3997ad15dd41.asciidoc
Normal file
23
docs/doc_examples/2f72a63c73dd672ac2dc3997ad15dd41.asciidoc
Normal file
@ -0,0 +1,23 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "test-index",
|
||||
mappings: {
|
||||
properties: {
|
||||
source_field: {
|
||||
type: "text",
|
||||
fields: {
|
||||
infer_field: {
|
||||
type: "semantic_text",
|
||||
inference_id: ".elser-2-elasticsearch",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
22
docs/doc_examples/2f9ee29fe49f7d206a41212aa5945296.asciidoc
Normal file
22
docs/doc_examples/2f9ee29fe49f7d206a41212aa5945296.asciidoc
Normal file
@ -0,0 +1,22 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.createFrom({
|
||||
source: "my-index",
|
||||
dest: "my-new-index",
|
||||
create_from: {
|
||||
settings_override: {
|
||||
index: {
|
||||
"blocks.write": null,
|
||||
"blocks.read": null,
|
||||
"blocks.read_only": null,
|
||||
"blocks.read_only_allow_delete": null,
|
||||
"blocks.metadata": null,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
28
docs/doc_examples/31832bd71c31c46a1ccf8d1c210d89d4.asciidoc
Normal file
28
docs/doc_examples/31832bd71c31c46a1ccf8d1c210d89d4.asciidoc
Normal file
@ -0,0 +1,28 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-*",
|
||||
query: {
|
||||
bool: {
|
||||
must: [
|
||||
{
|
||||
match: {
|
||||
"user.id": "kimchy",
|
||||
},
|
||||
},
|
||||
],
|
||||
must_not: [
|
||||
{
|
||||
terms: {
|
||||
_index: ["my-index-01"],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
31
docs/doc_examples/32c8c86702ccd68eb70f1573409c2a1f.asciidoc
Normal file
31
docs/doc_examples/32c8c86702ccd68eb70f1573409c2a1f.asciidoc
Normal file
@ -0,0 +1,31 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ilm.putLifecycle({
|
||||
name: "my_policy",
|
||||
policy: {
|
||||
phases: {
|
||||
hot: {
|
||||
actions: {
|
||||
rollover: {
|
||||
max_primary_shard_size: "50gb",
|
||||
},
|
||||
searchable_snapshot: {
|
||||
snapshot_repository: "backing_repo",
|
||||
replicate_for: "14d",
|
||||
},
|
||||
},
|
||||
},
|
||||
delete: {
|
||||
min_age: "28d",
|
||||
actions: {
|
||||
delete: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -14,6 +14,7 @@ const response = await client.indices.putSettings({
|
||||
"index.search.slowlog.threshold.fetch.info": "800ms",
|
||||
"index.search.slowlog.threshold.fetch.debug": "500ms",
|
||||
"index.search.slowlog.threshold.fetch.trace": "200ms",
|
||||
"index.search.slowlog.include.user": true,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
@ -9,7 +9,6 @@ const response = await client.indices.create({
|
||||
properties: {
|
||||
semantic_text: {
|
||||
type: "semantic_text",
|
||||
inference_id: "my-elser-endpoint",
|
||||
},
|
||||
content: {
|
||||
type: "text",
|
||||
70
docs/doc_examples/36792c81c053e0555407d1e83e7e054f.asciidoc
Normal file
70
docs/doc_examples/36792c81c053e0555407d1e83e7e054f.asciidoc
Normal file
@ -0,0 +1,70 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "movies",
|
||||
size: 10,
|
||||
retriever: {
|
||||
rescorer: {
|
||||
rescore: {
|
||||
window_size: 50,
|
||||
query: {
|
||||
rescore_query: {
|
||||
script_score: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
script: {
|
||||
source:
|
||||
"cosineSimilarity(params.queryVector, 'product-vector_final_stage') + 1.0",
|
||||
params: {
|
||||
queryVector: [-0.5, 90, -10, 14.8, -156],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
retriever: {
|
||||
rrf: {
|
||||
rank_window_size: 100,
|
||||
retrievers: [
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
sparse_vector: {
|
||||
field: "plot_embedding",
|
||||
inference_id: "my-elser-model",
|
||||
query: "films that explore psychological depths",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
standard: {
|
||||
query: {
|
||||
multi_match: {
|
||||
query: "crime",
|
||||
fields: ["plot", "title"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
knn: {
|
||||
field: "vector",
|
||||
query_vector: [10, 22, 77],
|
||||
k: 10,
|
||||
num_candidates: 10,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
23
docs/doc_examples/3722dad876023e0757138dd5a6d3240e.asciidoc
Normal file
23
docs/doc_examples/3722dad876023e0757138dd5a6d3240e.asciidoc
Normal file
@ -0,0 +1,23 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "my-index",
|
||||
settings: {
|
||||
index: {
|
||||
number_of_shards: 3,
|
||||
"blocks.write": true,
|
||||
},
|
||||
},
|
||||
mappings: {
|
||||
properties: {
|
||||
field1: {
|
||||
type: "text",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,23 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.bulk({
|
||||
index: "test-index",
|
||||
operations: [
|
||||
{
|
||||
update: {
|
||||
_id: "1",
|
||||
},
|
||||
},
|
||||
{
|
||||
doc: {
|
||||
infer_field: "updated inference field",
|
||||
source_field: "updated source field",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
19
docs/doc_examples/3a204b57072a104d9b50f3a9e064a8f6.asciidoc
Normal file
19
docs/doc_examples/3a204b57072a104d9b50f3a9e064a8f6.asciidoc
Normal file
@ -0,0 +1,19 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: ".ml-anomalies-custom-example",
|
||||
size: 0,
|
||||
aggs: {
|
||||
job_ids: {
|
||||
terms: {
|
||||
field: "job_id",
|
||||
size: 100,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
61
docs/doc_examples/3bc4a3681e3ea9cb3de49f72085807d8.asciidoc
Normal file
61
docs/doc_examples/3bc4a3681e3ea9cb3de49f72085807d8.asciidoc
Normal file
@ -0,0 +1,61 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "retrievers_example",
|
||||
retriever: {
|
||||
linear: {
|
||||
retrievers: [
|
||||
{
|
||||
retriever: {
|
||||
standard: {
|
||||
query: {
|
||||
function_score: {
|
||||
query: {
|
||||
term: {
|
||||
topic: "ai",
|
||||
},
|
||||
},
|
||||
functions: [
|
||||
{
|
||||
script_score: {
|
||||
script: {
|
||||
source: "doc['timestamp'].value.millis",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
boost_mode: "replace",
|
||||
},
|
||||
},
|
||||
sort: {
|
||||
timestamp: {
|
||||
order: "asc",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
weight: 2,
|
||||
normalizer: "minmax",
|
||||
},
|
||||
{
|
||||
retriever: {
|
||||
knn: {
|
||||
field: "vector",
|
||||
query_vector: [0.23, 0.67, 0.89],
|
||||
k: 3,
|
||||
num_candidates: 5,
|
||||
},
|
||||
},
|
||||
weight: 1.5,
|
||||
},
|
||||
],
|
||||
rank_window_size: 10,
|
||||
},
|
||||
},
|
||||
_source: false,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
16
docs/doc_examples/3ea4c971b3f47735dcc207ee2645fa03.asciidoc
Normal file
16
docs/doc_examples/3ea4c971b3f47735dcc207ee2645fa03.asciidoc
Normal file
@ -0,0 +1,16 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.updateAliases({
|
||||
actions: [
|
||||
{
|
||||
remove_index: {
|
||||
index: "my-index-2099.05.06-000001",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,14 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_query/async",
|
||||
body: {
|
||||
query:
|
||||
"\n FROM library\n | EVAL year = DATE_TRUNC(1 YEARS, release_date)\n | STATS MAX(page_count) BY year\n | SORT year\n | LIMIT 5\n ",
|
||||
wait_for_completion_timeout: "2s",
|
||||
},
|
||||
const response = await client.esql.asyncQuery({
|
||||
query:
|
||||
"\n FROM library\n | EVAL year = DATE_TRUNC(1 YEARS, release_date)\n | STATS MAX(page_count) BY year\n | SORT year\n | LIMIT 5\n ",
|
||||
wait_for_completion_timeout: "2s",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
18
docs/doc_examples/3f9dcf2aa42f3ecfb5ebfe48c1774103.asciidoc
Normal file
18
docs/doc_examples/3f9dcf2aa42f3ecfb5ebfe48c1774103.asciidoc
Normal file
@ -0,0 +1,18 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "kibana_sample_data_ecommerce",
|
||||
size: 0,
|
||||
aggs: {
|
||||
order_stats: {
|
||||
stats: {
|
||||
field: "taxful_total_price",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,9 +3,8 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "GET",
|
||||
path: "/_query/async/FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=",
|
||||
const response = await client.esql.asyncQueryGet({
|
||||
id: "FkpMRkJGS1gzVDRlM3g4ZzMyRGlLbkEaTXlJZHdNT09TU2VTZVBoNDM3cFZMUToxMDM=",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
18
docs/doc_examples/41d24383d29b2808a65258a0a3256e96.asciidoc
Normal file
18
docs/doc_examples/41d24383d29b2808a65258a0a3256e96.asciidoc
Normal file
@ -0,0 +1,18 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.create({
|
||||
index: "jinaai-index",
|
||||
mappings: {
|
||||
properties: {
|
||||
content: {
|
||||
type: "semantic_text",
|
||||
inference_id: "jinaai-embeddings",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
46
docs/doc_examples/45954b8aaedfed57012be8b6538b0a24.asciidoc
Normal file
46
docs/doc_examples/45954b8aaedfed57012be8b6538b0a24.asciidoc
Normal file
@ -0,0 +1,46 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.chatCompletionUnified({
|
||||
inference_id: "openai-completion",
|
||||
chat_completion_request: {
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: [
|
||||
{
|
||||
type: "text",
|
||||
text: "What's the price of a scarf?",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
tools: [
|
||||
{
|
||||
type: "function",
|
||||
function: {
|
||||
name: "get_current_price",
|
||||
description: "Get the current price of a item",
|
||||
parameters: {
|
||||
type: "object",
|
||||
properties: {
|
||||
item: {
|
||||
id: "123",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
tool_choice: {
|
||||
type: "function",
|
||||
function: {
|
||||
name: "get_current_price",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/46b771a9932c3fa6057a7b2679c72ef0.asciidoc
Normal file
10
docs/doc_examples/46b771a9932c3fa6057a7b2679c72ef0.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getMigrateReindexStatus({
|
||||
index: "my-data-stream",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -10,7 +10,8 @@ const response = await client.inference.put({
|
||||
service: "openai",
|
||||
service_settings: {
|
||||
api_key: "<api_key>",
|
||||
model_id: "text-embedding-ada-002",
|
||||
model_id: "text-embedding-3-small",
|
||||
dimensions: 128,
|
||||
},
|
||||
},
|
||||
});
|
||||
@ -6,6 +6,11 @@
|
||||
const response = await client.ml.startTrainedModelDeployment({
|
||||
model_id: "my_model",
|
||||
deployment_id: "my_model_for_search",
|
||||
adaptive_allocations: {
|
||||
enabled: true,
|
||||
min_number_of_allocations: 3,
|
||||
max_number_of_allocations: 10,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -3,12 +3,9 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_inference/completion/openai-completion/_stream",
|
||||
body: {
|
||||
input: "What is Elastic?",
|
||||
},
|
||||
const response = await client.inference.streamCompletion({
|
||||
inference_id: "openai-completion",
|
||||
input: "What is Elastic?",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -5,16 +5,11 @@
|
||||
----
|
||||
const response = await client.inference.put({
|
||||
task_type: "sparse_embedding",
|
||||
inference_id: "my-elser-endpoint",
|
||||
inference_id: "elser-model-eis",
|
||||
inference_config: {
|
||||
service: "elser",
|
||||
service: "elastic",
|
||||
service_settings: {
|
||||
adaptive_allocations: {
|
||||
enabled: true,
|
||||
min_number_of_allocations: 3,
|
||||
max_number_of_allocations: 10,
|
||||
},
|
||||
num_threads: 1,
|
||||
model_name: "elser",
|
||||
},
|
||||
},
|
||||
});
|
||||
@ -3,15 +3,18 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.knnSearch({
|
||||
index: "my-index",
|
||||
const response = await client.search({
|
||||
index: "image-index",
|
||||
knn: {
|
||||
field: "image_vector",
|
||||
query_vector: [0.3, 0.1, 1.2],
|
||||
field: "image-vector",
|
||||
query_vector: [-5, 9, -12],
|
||||
k: 10,
|
||||
num_candidates: 100,
|
||||
rescore_vector: {
|
||||
oversample: 2,
|
||||
},
|
||||
},
|
||||
_source: ["name", "file_type"],
|
||||
fields: ["title", "file-type"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -5,7 +5,7 @@
|
||||
----
|
||||
const response = await client.cluster.putSettings({
|
||||
persistent: {
|
||||
"cluster.routing.allocation.disk.watermark.low": "30gb",
|
||||
"migrate.data_stream_reindex_max_request_per_second": 10000,
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
18
docs/doc_examples/53d9d2ec9cb8d211772d764e76fe6890.asciidoc
Normal file
18
docs/doc_examples/53d9d2ec9cb8d211772d764e76fe6890.asciidoc
Normal file
@ -0,0 +1,18 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.ingest.simulate({
|
||||
id: "query_helper_pipeline",
|
||||
docs: [
|
||||
{
|
||||
_source: {
|
||||
content:
|
||||
"artificial intelligence in medicine articles published in the last 12 months",
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,14 +3,10 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_security/oidc/prepare",
|
||||
body: {
|
||||
realm: "oidc1",
|
||||
state: "lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO",
|
||||
nonce: "zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5",
|
||||
},
|
||||
const response = await client.security.oidcPrepareAuthentication({
|
||||
realm: "oidc1",
|
||||
state: "lGYK0EcSLjqH6pkT5EVZjC6eIW5YCGgywj2sxROO",
|
||||
nonce: "zOBXLJGUooRrbLbQk5YCcyC8AXw3iloynvluYhZ5",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -3,11 +3,13 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.inference({
|
||||
task_type: "my-inference-endpoint",
|
||||
inference_id: "_update",
|
||||
service_settings: {
|
||||
api_key: "<API_KEY>",
|
||||
const response = await client.search({
|
||||
index: "jinaai-index",
|
||||
query: {
|
||||
semantic: {
|
||||
field: "content",
|
||||
query: "who inspired taking care of the sea?",
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
10
docs/doc_examples/59aa5216630f80c5dc298fc5bba4a819.asciidoc
Normal file
10
docs/doc_examples/59aa5216630f80c5dc298fc5bba4a819.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getSettings({
|
||||
index: ".reindexed-v9-ml-anomalies-custom-example",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,17 +3,11 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_query/async",
|
||||
querystring: {
|
||||
format: "json",
|
||||
},
|
||||
body: {
|
||||
query:
|
||||
"\n FROM cluster_one:my-index*,cluster_two:logs*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
|
||||
include_ccs_metadata: true,
|
||||
},
|
||||
const response = await client.esql.asyncQuery({
|
||||
format: "json",
|
||||
query:
|
||||
"\n FROM cluster_one:my-index*,cluster_two:logs*\n | STATS COUNT(http.response.status_code) BY user.id\n | LIMIT 2\n ",
|
||||
include_ccs_metadata: true,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
11
docs/doc_examples/615dc36f0978c676624fb7d1144b4899.asciidoc
Normal file
11
docs/doc_examples/615dc36f0978c676624fb7d1144b4899.asciidoc
Normal file
@ -0,0 +1,11 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.getDataLifecycleStats({
|
||||
human: "true",
|
||||
pretty: "true",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
12
docs/doc_examples/66915e95b723ee2f6e5164a94b8f98c1.asciidoc
Normal file
12
docs/doc_examples/66915e95b723ee2f6e5164a94b8f98c1.asciidoc
Normal file
@ -0,0 +1,12 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.createFrom({
|
||||
source: "my-index",
|
||||
dest: "my-new-index",
|
||||
create_from: null,
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
10
docs/doc_examples/67b71a95b6fe6c83faae51ea038a1bf1.asciidoc
Normal file
10
docs/doc_examples/67b71a95b6fe6c83faae51ea038a1bf1.asciidoc
Normal file
@ -0,0 +1,10 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.esql.asyncQueryDelete({
|
||||
id: "FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI=",
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
15
docs/doc_examples/6b67c6121efb86ee100d40c2646f77b5.asciidoc
Normal file
15
docs/doc_examples/6b67c6121efb86ee100d40c2646f77b5.asciidoc
Normal file
@ -0,0 +1,15 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.indices.putSettings({
|
||||
index: "*",
|
||||
settings: {
|
||||
"index.search.slowlog.include.user": true,
|
||||
"index.search.slowlog.threshold.fetch.warn": "30s",
|
||||
"index.search.slowlog.threshold.query.warn": "30s",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -6,14 +6,15 @@
|
||||
const response = await client.search({
|
||||
index: "test-index",
|
||||
query: {
|
||||
nested: {
|
||||
path: "inference_field.inference.chunks",
|
||||
query: {
|
||||
sparse_vector: {
|
||||
field: "inference_field.inference.chunks.embeddings",
|
||||
inference_id: "my-inference-id",
|
||||
query: "mountain lake",
|
||||
},
|
||||
match: {
|
||||
my_semantic_field: "Which country is Paris in?",
|
||||
},
|
||||
},
|
||||
highlight: {
|
||||
fields: {
|
||||
my_semantic_field: {
|
||||
number_of_fragments: 2,
|
||||
order: "score",
|
||||
},
|
||||
},
|
||||
},
|
||||
16
docs/doc_examples/6e498b9dc753b94abf2618c407fa5cd8.asciidoc
Normal file
16
docs/doc_examples/6e498b9dc753b94abf2618c407fa5cd8.asciidoc
Normal file
@ -0,0 +1,16 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.reindex({
|
||||
wait_for_completion: "false",
|
||||
source: {
|
||||
index: ".ml-anomalies-custom-example",
|
||||
},
|
||||
dest: {
|
||||
index: ".reindexed-v9-ml-anomalies-custom-example",
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,12 +3,8 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.transport.request({
|
||||
method: "POST",
|
||||
path: "/_security/api_key/_bulk_update",
|
||||
body: {
|
||||
ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
|
||||
},
|
||||
const response = await client.security.bulkUpdateApiKeys({
|
||||
ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
|
||||
@ -12,6 +12,13 @@ const response = await client.search({
|
||||
fields: ["my_field", "my_field._2gram", "my_field._3gram"],
|
||||
},
|
||||
},
|
||||
highlight: {
|
||||
fields: {
|
||||
my_field: {
|
||||
matched_fields: ["my_field._index_prefix"],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
24
docs/doc_examples/730045fae3743c39b612813a42c330c3.asciidoc
Normal file
24
docs/doc_examples/730045fae3743c39b612813a42c330c3.asciidoc
Normal file
@ -0,0 +1,24 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "my-index-000001",
|
||||
query: {
|
||||
prefix: {
|
||||
full_name: {
|
||||
value: "ki",
|
||||
},
|
||||
},
|
||||
},
|
||||
highlight: {
|
||||
fields: {
|
||||
full_name: {
|
||||
matched_fields: ["full_name._index_prefix"],
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,8 +3,7 @@
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.inference.inference({
|
||||
task_type: "text_embedding",
|
||||
const response = await client.inference.textEmbedding({
|
||||
inference_id: "my-cohere-endpoint",
|
||||
input:
|
||||
"The sky above the port was the color of television tuned to a dead channel.",
|
||||
|
||||
33
docs/doc_examples/7478ff69113fb53f41ea07cdf911fa67.asciidoc
Normal file
33
docs/doc_examples/7478ff69113fb53f41ea07cdf911fa67.asciidoc
Normal file
@ -0,0 +1,33 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "kibana_sample_data_ecommerce",
|
||||
size: 0,
|
||||
aggs: {
|
||||
daily_sales: {
|
||||
date_histogram: {
|
||||
field: "order_date",
|
||||
calendar_interval: "day",
|
||||
},
|
||||
aggs: {
|
||||
daily_revenue: {
|
||||
sum: {
|
||||
field: "taxful_total_price",
|
||||
},
|
||||
},
|
||||
smoothed_revenue: {
|
||||
moving_fn: {
|
||||
buckets_path: "daily_revenue",
|
||||
window: 3,
|
||||
script: "MovingFunctions.unweightedAvg(values)",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -1,26 +0,0 @@
|
||||
// This file is autogenerated, DO NOT EDIT
|
||||
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
|
||||
|
||||
[source, js]
|
||||
----
|
||||
const response = await client.search({
|
||||
index: "test-index",
|
||||
query: {
|
||||
nested: {
|
||||
path: "inference_field.inference.chunks",
|
||||
query: {
|
||||
knn: {
|
||||
field: "inference_field.inference.chunks.embeddings",
|
||||
query_vector_builder: {
|
||||
text_embedding: {
|
||||
model_id: "my_inference_id",
|
||||
model_text: "mountain lake",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(response);
|
||||
----
|
||||
@ -3,35 +3,31 @@

[source, js]
----
const response = await client.transport.request({
  method: "POST",
  path: "/_text_structure/find_message_structure",
  body: {
    messages: [
      "[2024-03-05T10:52:36,256][INFO ][o.a.l.u.VectorUtilPanamaProvider] [laptop] Java vector incubator API enabled; uses preferredBitSize=128",
      "[2024-03-05T10:52:41,038][INFO ][o.e.p.PluginsService ] [laptop] loaded module [repository-url]",
      "[2024-03-05T10:52:41,042][INFO ][o.e.p.PluginsService ] [laptop] loaded module [rest-root]",
      "[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-core]",
      "[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-redact]",
      "[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [ingest-user-agent]",
      "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-monitoring]",
      "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [repository-s3]",
      "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-analytics]",
      "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-ent-search]",
      "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-autoscaling]",
      "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [lang-painless]]",
      "[2024-03-05T10:52:41,059][INFO ][o.e.p.PluginsService ] [laptop] loaded module [lang-expression]",
      "[2024-03-05T10:52:41,059][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-eql]",
      "[2024-03-05T10:52:43,291][INFO ][o.e.e.NodeEnvironment ] [laptop] heap size [16gb], compressed ordinary object pointers [true]",
      "[2024-03-05T10:52:46,098][INFO ][o.e.x.s.Security ] [laptop] Security is enabled",
      "[2024-03-05T10:52:47,227][INFO ][o.e.x.p.ProfilingPlugin ] [laptop] Profiling is enabled",
      "[2024-03-05T10:52:47,259][INFO ][o.e.x.p.ProfilingPlugin ] [laptop] profiling index templates will not be installed or reinstalled",
      "[2024-03-05T10:52:47,755][INFO ][o.e.i.r.RecoverySettings ] [laptop] using rate limit [40mb] with [default=40mb, read=0b, write=0b, max=0b]",
      "[2024-03-05T10:52:47,787][INFO ][o.e.d.DiscoveryModule ] [laptop] using discovery type [multi-node] and seed hosts providers [settings]",
      "[2024-03-05T10:52:49,188][INFO ][o.e.n.Node ] [laptop] initialized",
      "[2024-03-05T10:52:49,199][INFO ][o.e.n.Node ] [laptop] starting ...",
    ],
  },
const response = await client.textStructure.findMessageStructure({
  messages: [
    "[2024-03-05T10:52:36,256][INFO ][o.a.l.u.VectorUtilPanamaProvider] [laptop] Java vector incubator API enabled; uses preferredBitSize=128",
    "[2024-03-05T10:52:41,038][INFO ][o.e.p.PluginsService ] [laptop] loaded module [repository-url]",
    "[2024-03-05T10:52:41,042][INFO ][o.e.p.PluginsService ] [laptop] loaded module [rest-root]",
    "[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-core]",
    "[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-redact]",
    "[2024-03-05T10:52:41,043][INFO ][o.e.p.PluginsService ] [laptop] loaded module [ingest-user-agent]",
    "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-monitoring]",
    "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [repository-s3]",
    "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-analytics]",
    "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-ent-search]",
    "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-autoscaling]",
    "[2024-03-05T10:52:41,044][INFO ][o.e.p.PluginsService ] [laptop] loaded module [lang-painless]]",
    "[2024-03-05T10:52:41,059][INFO ][o.e.p.PluginsService ] [laptop] loaded module [lang-expression]",
    "[2024-03-05T10:52:41,059][INFO ][o.e.p.PluginsService ] [laptop] loaded module [x-pack-eql]",
    "[2024-03-05T10:52:43,291][INFO ][o.e.e.NodeEnvironment ] [laptop] heap size [16gb], compressed ordinary object pointers [true]",
    "[2024-03-05T10:52:46,098][INFO ][o.e.x.s.Security ] [laptop] Security is enabled",
    "[2024-03-05T10:52:47,227][INFO ][o.e.x.p.ProfilingPlugin ] [laptop] Profiling is enabled",
    "[2024-03-05T10:52:47,259][INFO ][o.e.x.p.ProfilingPlugin ] [laptop] profiling index templates will not be installed or reinstalled",
    "[2024-03-05T10:52:47,755][INFO ][o.e.i.r.RecoverySettings ] [laptop] using rate limit [40mb] with [default=40mb, read=0b, write=0b, max=0b]",
    "[2024-03-05T10:52:47,787][INFO ][o.e.d.DiscoveryModule ] [laptop] using discovery type [multi-node] and seed hosts providers [settings]",
    "[2024-03-05T10:52:49,188][INFO ][o.e.n.Node ] [laptop] initialized",
    "[2024-03-05T10:52:49,199][INFO ][o.e.n.Node ] [laptop] starting ...",
  ],
});
console.log(response);
----

@ -5,10 +5,8 @@
----
const response = await client.cluster.putSettings({
  persistent: {
    "cluster.routing.allocation.disk.watermark.low": "100gb",
    "cluster.routing.allocation.disk.watermark.high": "50gb",
    "cluster.routing.allocation.disk.watermark.flood_stage": "10gb",
    "cluster.info.update.interval": "1m",
    "cluster.routing.allocation.disk.watermark.low": "90%",
    "cluster.routing.allocation.disk.watermark.high": "95%",
  },
});
console.log(response);

35 docs/doc_examples/790684b45bef2bb848ea932f0fd0cfbd.asciidoc Normal file
@ -0,0 +1,35 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  query: {
    intervals: {
      my_text: {
        all_of: {
          ordered: false,
          max_gaps: 1,
          intervals: [
            {
              match: {
                query: "my favorite food",
                max_gaps: 0,
                ordered: true,
              },
            },
            {
              match: {
                query: "cold porridge",
                max_gaps: 4,
                ordered: true,
              },
            },
          ],
        },
      },
    },
  },
});
console.log(response);
----

@ -3,9 +3,8 @@

[source, js]
----
const response = await client.transport.request({
  method: "DELETE",
  path: "/_ingest/ip_location/database/my-database-id",
const response = await client.ingest.deleteIpLocationDatabase({
  id: "my-database-id",
});
console.log(response);
----

@ -7,14 +7,14 @@ const response = await client.indices.create({
  index: "test-index",
  mappings: {
    properties: {
      infer_field: {
        type: "semantic_text",
        inference_id: "my-elser-endpoint",
      },
      source_field: {
        type: "text",
        copy_to: "infer_field",
      },
      infer_field: {
        type: "semantic_text",
        inference_id: ".elser-2-elasticsearch",
      },
    },
  },
});

37 docs/doc_examples/7dd0d9cc6c5982a2c003d301e90feeba.asciidoc Normal file
@ -0,0 +1,37 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "kibana_sample_data_ecommerce",
  size: 0,
  aggs: {
    daily_sales: {
      date_histogram: {
        field: "order_date",
        calendar_interval: "day",
        format: "yyyy-MM-dd",
      },
      aggs: {
        revenue: {
          sum: {
            field: "taxful_total_price",
          },
        },
        unique_customers: {
          cardinality: {
            field: "customer_id",
          },
        },
        avg_basket_size: {
          avg: {
            field: "total_quantity",
          },
        },
      },
    },
  },
});
console.log(response);
----

@ -12,7 +12,7 @@ const response = await client.ingest.putPipeline({
        field: "data",
        indexed_chars: 11,
        indexed_chars_field: "max_size",
        remove_binary: false,
        remove_binary: true,
      },
    },
  ],

@ -3,30 +3,26 @@

[source, js]
----
const response = await client.transport.request({
  method: "POST",
  path: "/_security/api_key/_bulk_update",
  body: {
    ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
    role_descriptors: {
      "role-a": {
        indices: [
          {
            names: ["*"],
            privileges: ["write"],
          },
        ],
      },
const response = await client.security.bulkUpdateApiKeys({
  ids: ["VuaCfGcBCdbkQm-e5aOx", "H3_AhoIBA9hmeQJdg7ij"],
  role_descriptors: {
    "role-a": {
      indices: [
        {
          names: ["*"],
          privileges: ["write"],
        },
      ],
    },
    metadata: {
      environment: {
        level: 2,
        trusted: true,
        tags: ["production"],
      },
    },
    expiration: "30d",
  },
  metadata: {
    environment: {
      level: 2,
      trusted: true,
      tags: ["production"],
    },
  },
  expiration: "30d",
});
console.log(response);
----

33 docs/doc_examples/82bb6c61dab959f4446dc5ecab7ecbdf.asciidoc Normal file
@ -0,0 +1,33 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.inference.chatCompletionUnified({
  inference_id: "openai-completion",
  chat_completion_request: {
    messages: [
      {
        role: "assistant",
        content: "Let's find out what the weather is",
        tool_calls: [
          {
            id: "call_KcAjWtAww20AihPHphUh46Gd",
            type: "function",
            function: {
              name: "get_current_weather",
              arguments: '{"location":"Boston, MA"}',
            },
          },
        ],
      },
      {
        role: "tool",
        content: "The weather is cold",
        tool_call_id: "call_KcAjWtAww20AihPHphUh46Gd",
      },
    ],
  },
});
console.log(response);
----

@ -4,9 +4,11 @@
[source, js]
----
const response = await client.indices.putSettings({
  index: "my-index-000001",
  index: ".reindexed-v9-ml-anomalies-custom-example",
  settings: {
    "index.search.slowlog.include.user": true,
    index: {
      number_of_replicas: 0,
    },
  },
});
console.log(response);

12 docs/doc_examples/89f547649895176c246bb8c41313ff21.asciidoc Normal file
@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.esql.query({
  query:
    '\nFROM library\n| EVAL year = DATE_EXTRACT("year", release_date)\n| WHERE page_count > ? AND match(author, ?, {"minimum_should_match": ?})\n| LIMIT 5\n',
  params: [300, "Frank Herbert", 2],
});
console.log(response);
----

10 docs/doc_examples/8c47c80139f40f25db44f5781ca2dfbe.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.getAlias({
  index: ".ml-anomalies-custom-example",
});
console.log(response);
----

39 docs/doc_examples/8c639d3eef5c2de29e12bd9c6a42d3d4.asciidoc Normal file
@ -0,0 +1,39 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.search({
  index: "kibana_sample_data_ecommerce",
  size: 0,
  aggs: {
    categories: {
      terms: {
        field: "category.keyword",
        size: 5,
        order: {
          total_revenue: "desc",
        },
      },
      aggs: {
        total_revenue: {
          sum: {
            field: "taxful_total_price",
          },
        },
        avg_order_value: {
          avg: {
            field: "taxful_total_price",
          },
        },
        total_items: {
          sum: {
            field: "total_quantity",
          },
        },
      },
    },
  },
});
console.log(response);
----

@ -3,10 +3,9 @@

[source, js]
----
const response = await client.transport.request({
  method: "PUT",
  path: "/_ingest/ip_location/database/my-database-1",
  body: {
const response = await client.ingest.putIpLocationDatabase({
  id: "my-database-1",
  configuration: {
    name: "GeoIP2-Domain",
    maxmind: {
      account_id: "1234567",

42 docs/doc_examples/9250ac57ec81d5192e8ad4c462438489.asciidoc Normal file
@ -0,0 +1,42 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.bulk({
  index: "jinaai-index",
  operations: [
    {
      index: {
        _index: "jinaai-index",
        _id: "1",
      },
    },
    {
      content:
        "Sarah Johnson is a talented marine biologist working at the Oceanographic Institute. Her groundbreaking research on coral reef ecosystems has garnered international attention and numerous accolades.",
    },
    {
      index: {
        _index: "jinaai-index",
        _id: "2",
      },
    },
    {
      content:
        "She spends months at a time diving in remote locations, meticulously documenting the intricate relationships between various marine species. ",
    },
    {
      index: {
        _index: "jinaai-index",
        _id: "3",
      },
    },
    {
      content:
        "Her dedication to preserving these delicate underwater environments has inspired a new generation of conservationists.",
    },
  ],
});
console.log(response);
----

32 docs/doc_examples/931817b168e055ecf738785c721125dd.asciidoc Normal file
@ -0,0 +1,32 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.ingest.putPipeline({
  id: "query_helper_pipeline",
  processors: [
    {
      script: {
        source:
          "ctx.prompt = 'Please generate an elasticsearch search query on index `articles_index` for the following natural language query. Dates are in the field `@timestamp`, document types are in the field `type` (options are `news`, `publication`), categories in the field `category` and can be multiple (options are `medicine`, `pharmaceuticals`, `technology`), and document names are in the field `title` which should use a fuzzy match. Ignore fields which cannot be determined from the natural language query context: ' + ctx.content",
      },
    },
    {
      inference: {
        model_id: "openai_chat_completions",
        input_output: {
          input_field: "prompt",
          output_field: "query",
        },
      },
    },
    {
      remove: {
        field: "prompt",
      },
    },
  ],
});
console.log(response);
----

@ -3,8 +3,6 @@

[source, js]
----
const response = await client.security.queryRole({
  sort: ["name"],
});
const response = await client.indices.resolveCluster();
console.log(response);
----

@ -5,6 +5,9 @@
----
const response = await client.indices.create({
  index: "retrievers_example_nested",
  settings: {
    number_of_shards: 1,
  },
  mappings: {
    properties: {
      nested_field: {
@ -18,6 +21,9 @@ const response = await client.indices.create({
            dims: 3,
            similarity: "l2_norm",
            index: true,
            index_options: {
              type: "flat",
            },
          },
        },
      },

@ -3,9 +3,8 @@

[source, js]
----
const response = await client.transport.request({
  method: "DELETE",
  path: "/_ingest/ip_location/database/example-database-id",
const response = await client.ingest.deleteIpLocationDatabase({
  id: "example-database-id",
});
console.log(response);
----

@ -3,10 +3,10 @@

[source, js]
----
const response = await client.transport.request({
  method: "POST",
  path: "/_application/analytics/my_analytics_collection/event/search_click",
  body: {
const response = await client.searchApplication.postBehavioralAnalyticsEvent({
  collection_name: "my_analytics_collection",
  event_type: "search_click",
  payload: {
    session: {
      id: "1797ca95-91c9-4e2e-b1bd-9c38e6f386a9",
    },

@ -3,16 +3,12 @@

[source, js]
----
const response = await client.transport.request({
  method: "POST",
  path: "/_security/oidc/authenticate",
  body: {
    redirect_uri:
      "https://oidc-kibana.elastic.co:5603/api/security/oidc/callback?code=jtI3Ntt8v3_XvcLzCFGq&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I",
    state: "4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I",
    nonce: "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM",
    realm: "oidc1",
  },
const response = await client.security.oidcAuthenticate({
  redirect_uri:
    "https://oidc-kibana.elastic.co:5603/api/security/oidc/callback?code=jtI3Ntt8v3_XvcLzCFGq&state=4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I",
  state: "4dbrihtIAt3wBTwo6DxK-vdk-sSyDBV8Yf0AjdkdT5I",
  nonce: "WaBPH0KqPVdG5HHdSxPRjfoZbXMCicm5v1OiAj0DUFM",
  realm: "oidc1",
});
console.log(response);
----

30 docs/doc_examples/9cc952d4a03264b700136cbc45abc8c6.asciidoc Normal file
@ -0,0 +1,30 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.create({
  index: "my-rank-vectors-byte",
  mappings: {
    properties: {
      my_vector: {
        type: "rank_vectors",
        element_type: "byte",
      },
    },
  },
});
console.log(response);

const response1 = await client.index({
  index: "my-rank-vectors-byte",
  id: 1,
  document: {
    my_vector: [
      [1, 2, 3],
      [4, 5, 6],
    ],
  },
});
console.log(response1);
----

@ -3,13 +3,10 @@

[source, js]
----
const response = await client.transport.request({
  method: "POST",
  path: "/_application/search_application/my_search_application/_render_query",
  body: {
    params: {
      query_string: "rock climbing",
    },
const response = await client.searchApplication.renderQuery({
  name: "my_search_application",
  params: {
    query_string: "rock climbing",
  },
});
console.log(response);

12 docs/doc_examples/a46f566ca031375658c22f89b87dc6d2.asciidoc Normal file
@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.cat.indices({
  index: ".ml-anomalies-custom-example",
  v: "true",
  h: "index,store.size",
});
console.log(response);
----

10 docs/doc_examples/a60aaed30d7d26eaacbb2c0ed4ddc66d.asciidoc Normal file
@ -0,0 +1,10 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples

[source, js]
----
const response = await client.indices.cancelMigrateReindex({
  index: "my-data-stream",
});
console.log(response);
----

Some files were not shown because too many files have changed in this diff.