Auto-generated code for main (#2357)

This commit is contained in:
Elastic Machine
2024-09-03 15:36:33 +01:00
committed by GitHub
parent 9e08aaebe2
commit 132d6d6062
82 changed files with 797 additions and 418 deletions

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/my-e5-model", inference_id: "my-e5-model",
body: { inference_config: {
service: "elasticsearch", service: "elasticsearch",
service_settings: { service_settings: {
num_allocations: 1, num_allocations: 1,

View File

@ -3,13 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateApiKeyId({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_api_key_id", api_key_id: "my-api-key-id",
body: { api_key_secret_id: "my-connector-secret-id",
api_key_id: "my-api-key-id",
api_key_secret_id: "my-connector-secret-id",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/google_vertex_ai_embeddings", inference_id: "google_vertex_ai_embeddings",
body: { inference_config: {
service: "googlevertexai", service: "googlevertexai",
service_settings: { service_settings: {
service_account_json: "<service_account_json>", service_account_json: "<service_account_json>",

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/amazon_bedrock_embeddings", inference_id: "amazon_bedrock_embeddings",
body: { inference_config: {
service: "amazonbedrock", service: "amazonbedrock",
service_settings: { service_settings: {
access_key: "<aws_access_key>", access_key: "<aws_access_key>",

View File

@ -3,29 +3,26 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateFiltering({
method: "PUT", connector_id: "my-g-drive-connector",
path: "/_connector/my-g-drive-connector/_filtering", rules: [
body: { {
rules: [ field: "file_extension",
{ id: "exclude-txt-files",
field: "file_extension", order: 0,
id: "exclude-txt-files", policy: "exclude",
order: 0, rule: "equals",
policy: "exclude", value: "txt",
rule: "equals", },
value: "txt", {
}, field: "_",
{ id: "DEFAULT",
field: "_", order: 1,
id: "DEFAULT", policy: "include",
order: 1, rule: "regex",
policy: "include", value: ".*",
rule: "regex", },
value: ".*", ],
},
],
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,13 +3,11 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.inference({
method: "POST", task_type: "sparse_embedding",
path: "/_inference/sparse_embedding/my-elser-model", inference_id: "my-elser-model",
body: { input:
input: "The sky above the port was the color of television tuned to a dead channel.",
"The sky above the port was the color of television tuned to a dead channel.",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -0,0 +1,23 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Create the destination index with a dense_vector field sized for the
// embeddings produced upstream (768 dims, float, dot_product similarity),
// plus a plain text field holding the original content.
const mappings = {
  properties: {
    content_embedding: {
      type: "dense_vector",
      dims: 768,
      element_type: "float",
      similarity: "dot_product",
    },
    content: {
      type: "text",
    },
  },
};
const response = await client.indices.create({
  index: "google-vertex-ai-embeddings",
  mappings,
});
console.log(response);
----

View File

@ -3,9 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.ingest.deleteGeoipDatabase({ const response = await client.transport.request({
id: "my-database-id", method: "DELETE",
body: null, path: "/_ingest/geoip/database/my-database-id",
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/openai_embeddings", inference_id: "openai_embeddings",
body: { inference_config: {
service: "openai", service: "openai",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "rerank",
path: "/_inference/rerank/google_vertex_ai_rerank", inference_id: "google_vertex_ai_rerank",
body: { inference_config: {
service: "googlevertexai", service: "googlevertexai",
service_settings: { service_settings: {
service_account_json: "<service_account_json>", service_account_json: "<service_account_json>",

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "sparse_embedding",
path: "/_inference/sparse_embedding/my-elser-model", inference_id: "my-elser-model",
body: { inference_config: {
service: "elser", service: "elser",
service_settings: { service_settings: {
adaptive_allocations: { adaptive_allocations: {

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/azure_ai_studio_embeddings", inference_id: "azure_ai_studio_embeddings",
body: { inference_config: {
service: "azureaistudio", service: "azureaistudio",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,12 +3,8 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.list({
method: "GET", service_type: "sharepoint_online",
path: "/_connector",
querystring: {
service_type: "sharepoint_online",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Semantic kNN search: the query text is embedded at search time via the
// query_vector_builder, then matched against stored content embeddings.
const knn = {
  field: "content_embedding",
  query_vector_builder: {
    text_embedding: {
      model_id: "google_vertex_ai_embeddings",
      model_text: "Calculate fuel cost",
    },
  },
  k: 10,
  num_candidates: 100,
};
const response = await client.search({
  index: "google-vertex-ai-embeddings",
  knn,
  _source: ["id", "content"],
});
console.log(response);
----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/azure_ai_studio_embeddings", inference_id: "azure_ai_studio_embeddings",
body: { inference_config: {
service: "azureaistudio", service: "azureaistudio",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,14 +3,11 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.put({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector", index_name: "search-google-drive",
body: { name: "My Connector",
index_name: "search-google-drive", service_type: "google_drive",
name: "My Connector",
service_type: "google_drive",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "completion",
path: "/_inference/completion/anthropic_completion", inference_id: "anthropic_completion",
body: { inference_config: {
service: "anthropic", service: "anthropic",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,9 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.ingest.getGeoipDatabase({ const response = await client.transport.request({
id: "my-database-id", method: "GET",
body: null, path: "/_ingest/geoip/database/my-database-id",
}); });
console.log(response); console.log(response);
---- ----

View File

@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Register an AlibabaCloud AI Search text_embedding inference endpoint.
// Placeholder values must be replaced with real credentials/settings.
const serviceSettings = {
  api_key: "<api_key>",
  service_id: "<service_id>",
  host: "<host>",
  workspace: "<workspace>",
};
const response = await client.inference.put({
  task_type: "text_embedding",
  inference_id: "alibabacloud_ai_search_embeddings",
  inference_config: {
    service: "alibabacloud-ai-search",
    service_settings: serviceSettings,
  },
});
console.log(response);
----

View File

@ -3,16 +3,13 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updatePipeline({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_pipeline", pipeline: {
body: { extract_binary_content: true,
pipeline: { name: "my-connector-pipeline",
extract_binary_content: true, reduce_whitespace: true,
name: "my-connector-pipeline", run_ml_inference: true,
reduce_whitespace: true,
run_ml_inference: true,
},
}, },
}); });
console.log(response); console.log(response);

View File

@ -3,12 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateStatus({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_status", status: "needs_configuration",
body: {
status: "needs_configuration",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/my-msmarco-minilm-model", inference_id: "my-msmarco-minilm-model",
body: { inference_config: {
service: "elasticsearch", service: "elasticsearch",
service_settings: { service_settings: {
num_allocations: 1, num_allocations: 1,

View File

@ -3,9 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.delete({
method: "DELETE", task_type: "sparse_embedding",
path: "/_inference/sparse_embedding/my-elser-model", inference_id: "my-elser-model",
}); });
console.log(response); console.log(response);
---- ----

View File

@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Register an AlibabaCloud AI Search sparse_embedding inference endpoint
// backed by the ops-text-sparse-embedding-001 service.
const serviceSettings = {
  api_key: "<api_key>",
  service_id: "ops-text-sparse-embedding-001",
  host: "default-j01.platform-cn-shanghai.opensearch.aliyuncs.com",
  workspace: "default",
};
const response = await client.inference.put({
  task_type: "sparse_embedding",
  inference_id: "alibabacloud_ai_search_sparse",
  inference_config: {
    service: "alibabacloud-ai-search",
    service_settings: serviceSettings,
  },
});
console.log(response);
----

View File

@ -3,9 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.ingest.deleteGeoipDatabase({ const response = await client.transport.request({
id: "example-database-id", method: "DELETE",
body: null, path: "/_ingest/geoip/database/example-database-id",
}); });
console.log(response); console.log(response);
---- ----

View File

@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Create the destination index: a 1024-dim float dense_vector field for the
// embeddings alongside a text field for the raw content.
const mappings = {
  properties: {
    content_embedding: {
      type: "dense_vector",
      dims: 1024,
      element_type: "float",
    },
    content: {
      type: "text",
    },
  },
};
const response = await client.indices.create({
  index: "alibabacloud-ai-search-embeddings",
  mappings,
});
console.log(response);
----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "completion",
path: "/_inference/completion/openai-completion", inference_id: "openai-completion",
body: { inference_config: {
service: "openai", service: "openai",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,21 +3,18 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.lastSync({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_last_sync", last_access_control_sync_error: "Houston, we have a problem!",
body: { last_access_control_sync_scheduled_at: "2023-11-09T15:13:08.231Z",
last_access_control_sync_error: "Houston, we have a problem!", last_access_control_sync_status: "pending",
last_access_control_sync_scheduled_at: "2023-11-09T15:13:08.231Z", last_deleted_document_count: 42,
last_access_control_sync_status: "pending", last_incremental_sync_scheduled_at: "2023-11-09T15:13:08.231Z",
last_deleted_document_count: 42, last_indexed_document_count: 42,
last_incremental_sync_scheduled_at: "2023-11-09T15:13:08.231Z", last_sync_error: "Houston, we have a problem!",
last_indexed_document_count: 42, last_sync_scheduled_at: "2024-11-09T15:13:08.231Z",
last_sync_error: "Houston, we have a problem!", last_sync_status: "completed",
last_sync_scheduled_at: "2024-11-09T15:13:08.231Z", last_synced: "2024-11-09T15:13:08.231Z",
last_sync_status: "completed",
last_synced: "2024-11-09T15:13:08.231Z",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,12 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateIndexName({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_index_name", index_name: "data-from-my-google-drive",
body: {
index_name: "data-from-my-google-drive",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "completion",
path: "/_inference/completion/azure_ai_studio_completion", inference_id: "azure_ai_studio_completion",
body: { inference_config: {
service: "azureaistudio", service: "azureaistudio",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,15 +3,13 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.inference({
method: "POST", task_type: "text_embedding",
path: "/_inference/text_embedding/my-cohere-endpoint", inference_id: "my-cohere-endpoint",
body: { input:
input: "The sky above the port was the color of television tuned to a dead channel.",
"The sky above the port was the color of television tuned to a dead channel.", task_settings: {
task_settings: { input_type: "ingest",
input_type: "ingest",
},
}, },
}); });
console.log(response); console.log(response);

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/amazon_bedrock_embeddings", inference_id: "amazon_bedrock_embeddings",
body: { inference_config: {
service: "amazonbedrock", service: "amazonbedrock",
service_settings: { service_settings: {
access_key: "<aws_access_key>", access_key: "<aws_access_key>",

View File

@ -3,12 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.delete({
method: "DELETE", connector_id: "another-connector",
path: "/_connector/another-connector", delete_sync_jobs: "true",
querystring: {
delete_sync_jobs: "true",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/azure_openai_embeddings", inference_id: "azure_openai_embeddings",
body: { inference_config: {
service: "azureopenai", service: "azureopenai",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -0,0 +1,21 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Ingest pipeline with a single inference processor: it reads "content"
// and writes the embedding into "content_embedding".
const inferenceProcessor = {
  inference: {
    model_id: "google_vertex_ai_embeddings",
    input_output: {
      input_field: "content",
      output_field: "content_embedding",
    },
  },
};
const response = await client.ingest.putPipeline({
  id: "google_vertex_ai_embeddings",
  processors: [inferenceProcessor],
});
console.log(response);
----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "completion",
path: "/_inference/completion/amazon_bedrock_completion", inference_id: "amazon_bedrock_completion",
body: { inference_config: {
service: "amazonbedrock", service: "amazonbedrock",
service_settings: { service_settings: {
access_key: "<aws_access_key>", access_key: "<aws_access_key>",

View File

@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Reindex test-data through the embedding pipeline in batches of 50.
// wait_for_completion=false returns immediately with a task to monitor.
const source = { index: "test-data", size: 50 };
const dest = {
  index: "alibabacloud-ai-search-embeddings",
  pipeline: "alibabacloud_ai_search_embeddings",
};
const response = await client.reindex({
  wait_for_completion: "false",
  source,
  dest,
});
console.log(response);
----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/my-e5-model", inference_id: "my-e5-model",
body: { inference_config: {
service: "elasticsearch", service: "elasticsearch",
service_settings: { service_settings: {
adaptive_allocations: { adaptive_allocations: {

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/mistral_embeddings", inference_id: "mistral_embeddings",
body: { inference_config: {
service: "mistral", service: "mistral",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "rerank",
path: "/_inference/rerank/cohere-rerank", inference_id: "cohere-rerank",
body: { inference_config: {
service: "cohere", service: "cohere",
service_settings: { service_settings: {
api_key: "<API-KEY>", api_key: "<API-KEY>",

View File

@ -3,23 +3,20 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateScheduling({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_scheduling", scheduling: {
body: { access_control: {
scheduling: { enabled: true,
access_control: { interval: "0 10 0 * * ?",
enabled: true, },
interval: "0 10 0 * * ?", full: {
}, enabled: true,
full: { interval: "0 20 0 * * ?",
enabled: true, },
interval: "0 20 0 * * ?", incremental: {
}, enabled: false,
incremental: { interval: "0 30 0 * * ?",
enabled: false,
interval: "0 30 0 * * ?",
},
}, },
}, },
}); });

View File

@ -0,0 +1,22 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Semantic kNN search: the query text is embedded at search time via the
// query_vector_builder, then matched against stored content embeddings.
const knn = {
  field: "content_embedding",
  query_vector_builder: {
    text_embedding: {
      model_id: "alibabacloud_ai_search_embeddings",
      model_text: "Calculate fuel cost",
    },
  },
  k: 10,
  num_candidates: 100,
};
const response = await client.search({
  index: "alibabacloud-ai-search-embeddings",
  knn,
  _source: ["id", "content"],
});
console.log(response);
----

View File

@ -3,8 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.ingest.putGeoipDatabase({ const response = await client.transport.request({
id: "my-database-id", method: "PUT",
path: "/_ingest/geoip/database/my-database-id",
body: { body: {
name: "GeoIP2-Domain", name: "GeoIP2-Domain",
maxmind: { maxmind: {

View File

@ -0,0 +1,21 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Ingest pipeline with a single inference processor: it reads "content"
// and writes the embedding into "content_embedding".
const inferenceProcessor = {
  inference: {
    model_id: "alibabacloud_ai_search_embeddings",
    input_output: {
      input_field: "content",
      output_field: "content_embedding",
    },
  },
};
const response = await client.ingest.putPipeline({
  id: "alibabacloud_ai_search_embeddings",
  processors: [inferenceProcessor],
});
console.log(response);
----

View File

@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Register an AlibabaCloud AI Search text_embedding inference endpoint
// backed by the ops-text-embedding-001 service.
const serviceSettings = {
  api_key: "<api_key>",
  service_id: "ops-text-embedding-001",
  host: "default-j01.platform-cn-shanghai.opensearch.aliyuncs.com",
  workspace: "default",
};
const response = await client.inference.put({
  task_type: "text_embedding",
  inference_id: "alibabacloud_ai_search_embeddings",
  inference_config: {
    service: "alibabacloud-ai-search",
    service_settings: serviceSettings,
  },
});
console.log(response);
----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/mistral-embeddings-test", inference_id: "mistral-embeddings-test",
body: { inference_config: {
service: "mistral", service: "mistral",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/cohere-embeddings", inference_id: "cohere-embeddings",
body: { inference_config: {
service: "cohere", service: "cohere",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,17 +3,14 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateConfiguration({
method: "PUT", connector_id: "my-spo-connector",
path: "/_connector/my-spo-connector/_configuration", values: {
body: { tenant_id: "my-tenant-id",
values: { tenant_name: "my-sharepoint-site",
tenant_id: "my-tenant-id", client_id: "foo",
tenant_name: "my-sharepoint-site", secret_value: "bar",
client_id: "foo", site_collections: "*",
secret_value: "bar",
site_collections: "*",
},
}, },
}); });
console.log(response); console.log(response);

View File

@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Register a Google Vertex AI text_embedding inference endpoint using the
// text-embedding-004 model. Placeholders must be replaced with real values.
const serviceSettings = {
  service_account_json: "<service_account_json>",
  model_id: "text-embedding-004",
  location: "<location>",
  project_id: "<project_id>",
};
const response = await client.inference.put({
  task_type: "text_embedding",
  inference_id: "google_vertex_ai_embeddings",
  inference_config: {
    service: "googlevertexai",
    service_settings: serviceSettings,
  },
});
console.log(response);
----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/openai-embeddings", inference_id: "openai-embeddings",
body: { inference_config: {
service: "openai", service: "openai",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "sparse_embedding",
path: "/_inference/sparse_embedding/elser_embeddings", inference_id: "elser_embeddings",
body: { inference_config: {
service: "elser", service: "elser",
service_settings: { service_settings: {
num_allocations: 1, num_allocations: 1,

View File

@ -3,9 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.get({
method: "GET", task_type: "sparse_embedding",
path: "/_inference/sparse_embedding/my-elser-model", inference_id: "my-elser-model",
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/hugging_face_embeddings", inference_id: "hugging_face_embeddings",
body: { inference_config: {
service: "hugging_face", service: "hugging_face",
service_settings: { service_settings: {
api_key: "<access_token>", api_key: "<access_token>",

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/azure_openai_embeddings", inference_id: "azure_openai_embeddings",
body: { inference_config: {
service: "azureopenai", service: "azureopenai",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -0,0 +1,18 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Reindex test-data through the embedding pipeline in batches of 50.
// wait_for_completion=false returns immediately with a task to monitor.
const source = { index: "test-data", size: 50 };
const dest = {
  index: "google-vertex-ai-embeddings",
  pipeline: "google_vertex_ai_embeddings",
};
const response = await client.reindex({
  wait_for_completion: "false",
  source,
  dest,
});
console.log(response);
----

View File

@ -3,9 +3,6 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.list();
method: "GET",
path: "/_connector",
});
console.log(response); console.log(response);
---- ----

View File

@ -0,0 +1,20 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Register an AlibabaCloud AI Search rerank inference endpoint backed by
// the ops-bge-reranker-larger service.
const serviceSettings = {
  api_key: "<api_key>",
  service_id: "ops-bge-reranker-larger",
  host: "default-j01.platform-cn-shanghai.opensearch.aliyuncs.com",
  workspace: "default",
};
const response = await client.inference.put({
  task_type: "rerank",
  inference_id: "alibabacloud_ai_search_rerank",
  inference_config: {
    service: "alibabacloud-ai-search",
    service_settings: serviceSettings,
  },
});
console.log(response);
----

View File

@ -0,0 +1,12 @@
// This file is autogenerated, DO NOT EDIT
// Use `node scripts/generate-docs-examples.js` to generate the docs examples
[source, js]
----
// Open a point-in-time on the index, kept alive for one minute; partial
// results are permitted if some shards cannot participate.
const pitParams = {
  index: "my-index-000001",
  keep_alive: "1m",
  allow_partial_search_results: "true",
};
const response = await client.openPointInTime(pitParams);
console.log(response);
----

View File

@ -3,12 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.inference({
method: "POST", task_type: "completion",
path: "/_inference/completion/openai_chat_completions", inference_id: "openai_chat_completions",
body: { input: "What is Elastic?",
input: "What is Elastic?",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "sparse_embedding",
path: "/_inference/sparse_embedding/my-elser-endpoint", inference_id: "my-elser-endpoint",
body: { inference_config: {
service: "elser", service: "elser",
service_settings: { service_settings: {
num_allocations: 1, num_allocations: 1,

View File

@ -3,13 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateName({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_name", name: "Custom connector",
body: { description: "This is my customized connector",
name: "Custom connector",
description: "This is my customized connector",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,13 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateConfiguration({
method: "PUT", connector_id: "my-spo-connector",
path: "/_connector/my-spo-connector/_configuration", values: {
body: { secret_value: "foo-bar",
values: {
secret_value: "foo-bar",
},
}, },
}); });
console.log(response); console.log(response);

View File

@ -3,12 +3,8 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.list({
method: "GET", index_name: "search-google-drive",
path: "/_connector",
querystring: {
index_name: "search-google-drive",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,12 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateServiceType({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_service_type", service_type: "sharepoint_online",
body: {
service_type: "sharepoint_online",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,16 +3,13 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.put({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector", index_name: "search-google-drive",
body: { name: "My Connector",
index_name: "search-google-drive", description: "My Connector to sync data to Elastic index from Google Drive",
name: "My Connector", service_type: "google_drive",
description: "My Connector to sync data to Elastic index from Google Drive", language: "english",
service_type: "google_drive",
language: "english",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "sparse_embedding",
path: "/_inference/sparse_embedding/my-elser-model", inference_id: "my-elser-model",
body: { inference_config: {
service: "elser", service: "elser",
service_settings: { service_settings: {
num_allocations: 1, num_allocations: 1,

View File

@ -3,9 +3,8 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.checkIn({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_check_in",
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,13 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.list({
method: "GET", from: 0,
path: "/_connector", size: 2,
querystring: {
from: "0",
size: "2",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/cohere_embeddings", inference_id: "cohere_embeddings",
body: { inference_config: {
service: "cohere", service: "cohere",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "completion",
path: "/_inference/completion/google_ai_studio_completion", inference_id: "google_ai_studio_completion",
body: { inference_config: {
service: "googleaistudio", service: "googleaistudio",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,12 +3,8 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.list({
method: "GET", service_type: "sharepoint_online,google_drive",
path: "/_connector",
querystring: {
service_type: "sharepoint_online,google_drive",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,15 +3,12 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateScheduling({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_scheduling", scheduling: {
body: { full: {
scheduling: { enabled: true,
full: { interval: "0 10 0 * * ?",
enabled: true,
interval: "0 10 0 * * ?",
},
}, },
}, },
}); });

View File

@ -3,12 +3,9 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateError({
method: "PUT", connector_id: "my-connector",
path: "/_connector/my-connector/_error", error: "Houston, we have a problem!",
body: {
error: "Houston, we have a problem!",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "text_embedding",
path: "/_inference/text_embedding/hugging-face-embeddings", inference_id: "hugging-face-embeddings",
body: { inference_config: {
service: "hugging_face", service: "hugging_face",
service_settings: { service_settings: {
api_key: "<access_token>", api_key: "<access_token>",

View File

@ -3,13 +3,11 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.inference({
method: "POST", task_type: "rerank",
path: "/_inference/rerank/cohere_rerank", inference_id: "cohere_rerank",
body: { input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"],
input: ["luke", "like", "leia", "chewy", "r2d2", "star", "wars"], query: "star wars main character",
query: "star wars main character",
},
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,9 +3,8 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.get({
method: "GET", connector_id: "my-connector",
path: "/_connector/my-connector",
}); });
console.log(response); console.log(response);
---- ----

View File

@ -3,10 +3,10 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.inference.put({
method: "PUT", task_type: "completion",
path: "/_inference/completion/azure_openai_completion", inference_id: "azure_openai_completion",
body: { inference_config: {
service: "azureopenai", service: "azureopenai",
service_settings: { service_settings: {
api_key: "<api_key>", api_key: "<api_key>",

View File

@ -3,19 +3,16 @@
[source, js] [source, js]
---- ----
const response = await client.transport.request({ const response = await client.connector.updateFiltering({
method: "PUT", connector_id: "my-sql-connector",
path: "/_connector/my-sql-connector/_filtering", advanced_snippet: {
body: { value: [
advanced_snippet: { {
value: [ tables: ["users", "orders"],
{ query:
tables: ["users", "orders"], "SELECT users.id AS id, orders.order_id AS order_id FROM users JOIN orders ON users.id = orders.user_id",
query: },
"SELECT users.id AS id, orders.order_id AS order_id FROM users JOIN orders ON users.id = orders.user_id", ],
},
],
},
}, },
}); });
console.log(response); console.log(response);

View File

@ -5667,12 +5667,20 @@ client.inference.put({ inference_id })
=== ingest === ingest
[discrete] [discrete]
==== delete_geoip_database ==== delete_geoip_database
Deletes a geoip database configuration Deletes a geoip database configuration.
[source,ts] [source,ts]
---- ----
client.ingest.deleteGeoipDatabase() client.ingest.deleteGeoipDatabase({ id })
---- ----
[discrete]
==== Arguments
* *Request (object):*
** *`id` (string | string[])*: A list of geoip database configurations to delete
** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node.
If no response is received before the timeout expires, the request fails and returns an error.
** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error.
[discrete] [discrete]
==== delete_pipeline ==== delete_pipeline
@ -5708,12 +5716,21 @@ client.ingest.geoIpStats()
[discrete] [discrete]
==== get_geoip_database ==== get_geoip_database
Returns geoip database configuration. Returns information about one or more geoip database configurations.
[source,ts] [source,ts]
---- ----
client.ingest.getGeoipDatabase() client.ingest.getGeoipDatabase({ ... })
---- ----
[discrete]
==== Arguments
* *Request (object):*
** *`id` (Optional, string | string[])*: List of database configuration IDs to retrieve.
Wildcard (`*`) expressions are supported.
To get all database configurations, omit this parameter or use `*`.
** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node.
If no response is received before the timeout expires, the request fails and returns an error.
[discrete] [discrete]
==== get_pipeline ==== get_pipeline
@ -5752,12 +5769,23 @@ client.ingest.processorGrok()
[discrete] [discrete]
==== put_geoip_database ==== put_geoip_database
Puts the configuration for a geoip database to be downloaded Returns information about one or more geoip database configurations.
[source,ts] [source,ts]
---- ----
client.ingest.putGeoipDatabase() client.ingest.putGeoipDatabase({ id, name, maxmind })
---- ----
[discrete]
==== Arguments
* *Request (object):*
** *`id` (string)*: ID of the database configuration to create or update.
** *`name` (string)*: The provider-assigned name of the IP geolocation database to download.
** *`maxmind` ({ account_id })*: The configuration necessary to identify which IP geolocation provider to use to download the database, as well as any provider-specific configuration necessary for such downloading.
At present, the only supported provider is maxmind, and the maxmind provider requires that an account_id (string) is configured.
** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node.
If no response is received before the timeout expires, the request fails and returns an error.
** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error.
[discrete] [discrete]
==== put_pipeline ==== put_pipeline
@ -5777,8 +5805,8 @@ client.ingest.putPipeline({ id })
** *`id` (string)*: ID of the ingest pipeline to create or update. ** *`id` (string)*: ID of the ingest pipeline to create or update.
** *`_meta` (Optional, Record<string, User-defined value>)*: Optional metadata about the ingest pipeline. May have any contents. This map is not automatically generated by Elasticsearch. ** *`_meta` (Optional, Record<string, User-defined value>)*: Optional metadata about the ingest pipeline. May have any contents. This map is not automatically generated by Elasticsearch.
** *`description` (Optional, string)*: Description of the ingest pipeline. ** *`description` (Optional, string)*: Description of the ingest pipeline.
** *`on_failure` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, user_agent }[])*: Processors to run immediately after a processor failure. Each processor supports a processor-level `on_failure` value. If a processor without an `on_failure` value fails, Elasticsearch uses this pipeline-level parameter as a fallback. The processors in this parameter run sequentially in the order specified. Elasticsearch will not attempt to run the pipeline's remaining processors. ** *`on_failure` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, html_strip, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, uri_parts, user_agent }[])*: Processors to run immediately after a processor failure. Each processor supports a processor-level `on_failure` value. If a processor without an `on_failure` value fails, Elasticsearch uses this pipeline-level parameter as a fallback. The processors in this parameter run sequentially in the order specified. Elasticsearch will not attempt to run the pipeline's remaining processors.
** *`processors` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, user_agent }[])*: Processors used to perform transformations on documents before indexing. Processors run sequentially in the order specified. ** *`processors` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, html_strip, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, uri_parts, user_agent }[])*: Processors used to perform transformations on documents before indexing. Processors run sequentially in the order specified.
** *`version` (Optional, number)*: Version number used by external systems to track ingest pipelines. This parameter is intended for external systems only. Elasticsearch does not use or validate pipeline version numbers. ** *`version` (Optional, number)*: Version number used by external systems to track ingest pipelines. This parameter is intended for external systems only. Elasticsearch does not use or validate pipeline version numbers.
** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. ** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error.
** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error. ** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error.
@ -5791,16 +5819,16 @@ Executes an ingest pipeline against a set of provided documents.
{ref}/simulate-pipeline-api.html[Endpoint documentation] {ref}/simulate-pipeline-api.html[Endpoint documentation]
[source,ts] [source,ts]
---- ----
client.ingest.simulate({ ... }) client.ingest.simulate({ docs })
---- ----
[discrete] [discrete]
==== Arguments ==== Arguments
* *Request (object):* * *Request (object):*
** *`docs` ({ _id, _index, _source }[])*: Sample documents to test in the pipeline.
** *`id` (Optional, string)*: Pipeline to test. ** *`id` (Optional, string)*: Pipeline to test.
If you dont specify a `pipeline` in the request body, this parameter is required. If you dont specify a `pipeline` in the request body, this parameter is required.
** *`docs` (Optional, { _id, _index, _source }[])*: Sample documents to test in the pipeline.
** *`pipeline` (Optional, { description, on_failure, processors, version, _meta })*: Pipeline to test. ** *`pipeline` (Optional, { description, on_failure, processors, version, _meta })*: Pipeline to test.
If you dont specify the `pipeline` request path parameter, this parameter is required. If you dont specify the `pipeline` request path parameter, this parameter is required.
If you specify both this and the request path parameter, the API only uses the request path parameter. If you specify both this and the request path parameter, the API only uses the request path parameter.

View File

@ -45,22 +45,22 @@ export default class Ingest {
} }
/** /**
* Deletes a geoip database configuration * Deletes a geoip database configuration.
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation} * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation}
*/ */
async deleteGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO> async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest | TB.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IngestDeleteGeoipDatabaseResponse>
async deleteGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>> async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest | TB.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestDeleteGeoipDatabaseResponse, unknown>>
async deleteGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO> async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest | TB.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestDeleteGeoipDatabaseResponse>
async deleteGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> { async deleteGeoipDatabase (this: That, params: T.IngestDeleteGeoipDatabaseRequest | TB.IngestDeleteGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['id'] const acceptedPath: string[] = ['id']
const querystring: Record<string, any> = {} const querystring: Record<string, any> = {}
const body = undefined const body = undefined
params = params ?? {}
for (const key in params) { for (const key in params) {
if (acceptedPath.includes(key)) { if (acceptedPath.includes(key)) {
continue continue
} else if (key !== 'body') { } else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key] querystring[key] = params[key]
} }
} }
@ -139,13 +139,13 @@ export default class Ingest {
} }
/** /**
* Returns geoip database configuration. * Returns information about one or more geoip database configurations.
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation} * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation}
*/ */
async getGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO> async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest | TB.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IngestGetGeoipDatabaseResponse>
async getGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>> async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest | TB.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestGetGeoipDatabaseResponse, unknown>>
async getGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO> async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest | TB.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestGetGeoipDatabaseResponse>
async getGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> { async getGeoipDatabase (this: That, params?: T.IngestGetGeoipDatabaseRequest | TB.IngestGetGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['id'] const acceptedPath: string[] = ['id']
const querystring: Record<string, any> = {} const querystring: Record<string, any> = {}
const body = undefined const body = undefined
@ -155,6 +155,7 @@ export default class Ingest {
if (acceptedPath.includes(key)) { if (acceptedPath.includes(key)) {
continue continue
} else if (key !== 'body') { } else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key] querystring[key] = params[key]
} }
} }
@ -248,22 +249,34 @@ export default class Ingest {
} }
/** /**
* Puts the configuration for a geoip database to be downloaded * Returns information about one or more geoip database configurations.
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation} * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/TODO.html | Elasticsearch API documentation}
*/ */
async putGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithOutMeta): Promise<T.TODO> async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest | TB.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IngestPutGeoipDatabaseResponse>
async putGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.TODO, unknown>> async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest | TB.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestPutGeoipDatabaseResponse, unknown>>
async putGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<T.TODO> async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest | TB.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<T.IngestPutGeoipDatabaseResponse>
async putGeoipDatabase (this: That, params?: T.TODO | TB.TODO, options?: TransportRequestOptions): Promise<any> { async putGeoipDatabase (this: That, params: T.IngestPutGeoipDatabaseRequest | TB.IngestPutGeoipDatabaseRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['id'] const acceptedPath: string[] = ['id']
const acceptedBody: string[] = ['name', 'maxmind']
const querystring: Record<string, any> = {} const querystring: Record<string, any> = {}
const body = undefined // @ts-expect-error
const userBody: any = params?.body
let body: Record<string, any> | string
if (typeof userBody === 'string') {
body = userBody
} else {
body = userBody != null ? { ...userBody } : undefined
}
params = params ?? {}
for (const key in params) { for (const key in params) {
if (acceptedPath.includes(key)) { if (acceptedBody.includes(key)) {
body = body ?? {}
// @ts-expect-error
body[key] = params[key]
} else if (acceptedPath.includes(key)) {
continue continue
} else if (key !== 'body') { } else if (key !== 'body') {
// @ts-expect-error
querystring[key] = params[key] querystring[key] = params[key]
} }
} }
@ -327,10 +340,10 @@ export default class Ingest {
* Executes an ingest pipeline against a set of provided documents. * Executes an ingest pipeline against a set of provided documents.
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html | Elasticsearch API documentation} * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html | Elasticsearch API documentation}
*/ */
async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IngestSimulateResponse> async simulate (this: That, params: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithOutMeta): Promise<T.IngestSimulateResponse>
async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestSimulateResponse, unknown>> async simulate (this: That, params: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithMeta): Promise<TransportResult<T.IngestSimulateResponse, unknown>>
async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptions): Promise<T.IngestSimulateResponse> async simulate (this: That, params: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptions): Promise<T.IngestSimulateResponse>
async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptions): Promise<any> { async simulate (this: That, params: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptions): Promise<any> {
const acceptedPath: string[] = ['id'] const acceptedPath: string[] = ['id']
const acceptedBody: string[] = ['docs', 'pipeline'] const acceptedBody: string[] = ['docs', 'pipeline']
const querystring: Record<string, any> = {} const querystring: Record<string, any> = {}
@ -343,7 +356,6 @@ export default class Ingest {
body = userBody != null ? { ...userBody } : undefined body = userBody != null ? { ...userBody } : undefined
} }
params = params ?? {}
for (const key in params) { for (const key in params) {
if (acceptedBody.includes(key)) { if (acceptedBody.includes(key)) {
body = body ?? {} body = body ?? {}

View File

@ -1117,7 +1117,7 @@ export interface RenderSearchTemplateResponse {
export interface ScriptsPainlessExecutePainlessContextSetup { export interface ScriptsPainlessExecutePainlessContextSetup {
document: any document: any
index: IndexName index: IndexName
query: QueryDslQueryContainer query?: QueryDslQueryContainer
} }
export interface ScriptsPainlessExecuteRequest extends RequestBase { export interface ScriptsPainlessExecuteRequest extends RequestBase {
@ -4836,11 +4836,11 @@ export type AnalysisPhoneticRuleType = 'approx' | 'exact'
export interface AnalysisPhoneticTokenFilter extends AnalysisTokenFilterBase { export interface AnalysisPhoneticTokenFilter extends AnalysisTokenFilterBase {
type: 'phonetic' type: 'phonetic'
encoder: AnalysisPhoneticEncoder encoder: AnalysisPhoneticEncoder
languageset: AnalysisPhoneticLanguage | AnalysisPhoneticLanguage[] languageset?: AnalysisPhoneticLanguage | AnalysisPhoneticLanguage[]
max_code_len?: integer max_code_len?: integer
name_type: AnalysisPhoneticNameType name_type?: AnalysisPhoneticNameType
replace?: boolean replace?: boolean
rule_type: AnalysisPhoneticRuleType rule_type?: AnalysisPhoneticRuleType
} }
export interface AnalysisPorterStemTokenFilter extends AnalysisTokenFilterBase { export interface AnalysisPorterStemTokenFilter extends AnalysisTokenFilterBase {
@ -12445,6 +12445,11 @@ export interface IngestCsvProcessor extends IngestProcessorBase {
trim?: boolean trim?: boolean
} }
export interface IngestDatabaseConfiguration {
name: Name
maxmind: IngestMaxmind
}
export interface IngestDateIndexNameProcessor extends IngestProcessorBase { export interface IngestDateIndexNameProcessor extends IngestProcessorBase {
date_formats: string[] date_formats: string[]
date_rounding: string date_rounding: string
@ -12523,6 +12528,12 @@ export interface IngestGsubProcessor extends IngestProcessorBase {
target_field?: Field target_field?: Field
} }
export interface IngestHtmlStripProcessor extends IngestProcessorBase {
field: Field
ignore_missing?: boolean
target_field?: Field
}
export interface IngestInferenceConfig { export interface IngestInferenceConfig {
regression?: IngestInferenceConfigRegression regression?: IngestInferenceConfigRegression
classification?: IngestInferenceConfigClassification classification?: IngestInferenceConfigClassification
@ -12584,6 +12595,10 @@ export interface IngestLowercaseProcessor extends IngestProcessorBase {
target_field?: Field target_field?: Field
} }
export interface IngestMaxmind {
account_id: Id
}
export interface IngestPipeline { export interface IngestPipeline {
description?: string description?: string
on_failure?: IngestProcessorContainer[] on_failure?: IngestProcessorContainer[]
@ -12629,6 +12644,7 @@ export interface IngestProcessorContainer {
geoip?: IngestGeoIpProcessor geoip?: IngestGeoIpProcessor
grok?: IngestGrokProcessor grok?: IngestGrokProcessor
gsub?: IngestGsubProcessor gsub?: IngestGsubProcessor
html_strip?: IngestHtmlStripProcessor
inference?: IngestInferenceProcessor inference?: IngestInferenceProcessor
join?: IngestJoinProcessor join?: IngestJoinProcessor
json?: IngestJsonProcessor json?: IngestJsonProcessor
@ -12646,6 +12662,7 @@ export interface IngestProcessorContainer {
trim?: IngestTrimProcessor trim?: IngestTrimProcessor
uppercase?: IngestUppercaseProcessor uppercase?: IngestUppercaseProcessor
urldecode?: IngestUrlDecodeProcessor urldecode?: IngestUrlDecodeProcessor
uri_parts?: IngestUriPartsProcessor
user_agent?: IngestUserAgentProcessor user_agent?: IngestUserAgentProcessor
} }
@ -12716,6 +12733,14 @@ export interface IngestUppercaseProcessor extends IngestProcessorBase {
target_field?: Field target_field?: Field
} }
export interface IngestUriPartsProcessor extends IngestProcessorBase {
field: Field
ignore_missing?: boolean
keep_original?: boolean
remove_if_successful?: boolean
target_field?: Field
}
export interface IngestUrlDecodeProcessor extends IngestProcessorBase { export interface IngestUrlDecodeProcessor extends IngestProcessorBase {
field: Field field: Field
ignore_missing?: boolean ignore_missing?: boolean
@ -12732,6 +12757,14 @@ export interface IngestUserAgentProcessor extends IngestProcessorBase {
export type IngestUserAgentProperty = 'NAME' | 'MAJOR' | 'MINOR' | 'PATCH' | 'OS' | 'OS_NAME' | 'OS_MAJOR' | 'OS_MINOR' | 'DEVICE' | 'BUILD' export type IngestUserAgentProperty = 'NAME' | 'MAJOR' | 'MINOR' | 'PATCH' | 'OS' | 'OS_NAME' | 'OS_MAJOR' | 'OS_MINOR' | 'DEVICE' | 'BUILD'
export interface IngestDeleteGeoipDatabaseRequest extends RequestBase {
id: Ids
master_timeout?: Duration
timeout?: Duration
}
export type IngestDeleteGeoipDatabaseResponse = AcknowledgedResponseBase
export interface IngestDeletePipelineRequest extends RequestBase { export interface IngestDeletePipelineRequest extends RequestBase {
id: Id id: Id
master_timeout?: Duration master_timeout?: Duration
@ -12744,8 +12777,9 @@ export interface IngestGeoIpStatsGeoIpDownloadStatistics {
successful_downloads: integer successful_downloads: integer
failed_downloads: integer failed_downloads: integer
total_download_time: DurationValue<UnitMillis> total_download_time: DurationValue<UnitMillis>
database_count: integer databases_count: integer
skipped_updates: integer skipped_updates: integer
expired_databases: integer
} }
export interface IngestGeoIpStatsGeoIpNodeDatabaseName { export interface IngestGeoIpStatsGeoIpNodeDatabaseName {
@ -12765,6 +12799,22 @@ export interface IngestGeoIpStatsResponse {
nodes: Record<Id, IngestGeoIpStatsGeoIpNodeDatabases> nodes: Record<Id, IngestGeoIpStatsGeoIpNodeDatabases>
} }
export interface IngestGetGeoipDatabaseDatabaseConfigurationMetadata {
id: Id
version: long
modified_date_millis: EpochTime<UnitMillis>
database: IngestDatabaseConfiguration
}
export interface IngestGetGeoipDatabaseRequest extends RequestBase {
id?: Ids
master_timeout?: Duration
}
export interface IngestGetGeoipDatabaseResponse {
databases: IngestGetGeoipDatabaseDatabaseConfigurationMetadata[]
}
export interface IngestGetPipelineRequest extends RequestBase { export interface IngestGetPipelineRequest extends RequestBase {
id?: Id id?: Id
master_timeout?: Duration master_timeout?: Duration
@ -12780,6 +12830,16 @@ export interface IngestProcessorGrokResponse {
patterns: Record<string, string> patterns: Record<string, string>
} }
export interface IngestPutGeoipDatabaseRequest extends RequestBase {
id: Id
master_timeout?: Duration
timeout?: Duration
name: Name
maxmind: IngestMaxmind
}
export type IngestPutGeoipDatabaseResponse = AcknowledgedResponseBase
export interface IngestPutPipelineRequest extends RequestBase { export interface IngestPutPipelineRequest extends RequestBase {
id: Id id: Id
master_timeout?: Duration master_timeout?: Duration
@ -12819,21 +12879,29 @@ export interface IngestSimulateIngest {
export interface IngestSimulatePipelineSimulation { export interface IngestSimulatePipelineSimulation {
doc?: IngestSimulateDocumentSimulation doc?: IngestSimulateDocumentSimulation
processor_results?: IngestSimulatePipelineSimulation[]
tag?: string tag?: string
processor_type?: string processor_type?: string
status?: WatcherActionStatusOptions status?: WatcherActionStatusOptions
description?: string
ignored_error?: ErrorCause
error?: ErrorCause
} }
export interface IngestSimulateRequest extends RequestBase { export interface IngestSimulateRequest extends RequestBase {
id?: Id id?: Id
verbose?: boolean verbose?: boolean
docs?: IngestSimulateDocument[] docs: IngestSimulateDocument[]
pipeline?: IngestPipeline pipeline?: IngestPipeline
} }
export interface IngestSimulateResponse { export interface IngestSimulateResponse {
docs: IngestSimulatePipelineSimulation[] docs: IngestSimulateSimulateDocumentResult[]
}
export interface IngestSimulateSimulateDocumentResult {
doc?: IngestSimulateDocumentSimulation
error?: ErrorCause
processor_results?: IngestSimulatePipelineSimulation[]
} }
export interface LicenseLicense { export interface LicenseLicense {

View File

@ -1162,7 +1162,7 @@ export interface RenderSearchTemplateResponse {
export interface ScriptsPainlessExecutePainlessContextSetup { export interface ScriptsPainlessExecutePainlessContextSetup {
document: any document: any
index: IndexName index: IndexName
query: QueryDslQueryContainer query?: QueryDslQueryContainer
} }
export interface ScriptsPainlessExecuteRequest extends RequestBase { export interface ScriptsPainlessExecuteRequest extends RequestBase {
@ -4909,11 +4909,11 @@ export type AnalysisPhoneticRuleType = 'approx' | 'exact'
export interface AnalysisPhoneticTokenFilter extends AnalysisTokenFilterBase { export interface AnalysisPhoneticTokenFilter extends AnalysisTokenFilterBase {
type: 'phonetic' type: 'phonetic'
encoder: AnalysisPhoneticEncoder encoder: AnalysisPhoneticEncoder
languageset: AnalysisPhoneticLanguage | AnalysisPhoneticLanguage[] languageset?: AnalysisPhoneticLanguage | AnalysisPhoneticLanguage[]
max_code_len?: integer max_code_len?: integer
name_type: AnalysisPhoneticNameType name_type?: AnalysisPhoneticNameType
replace?: boolean replace?: boolean
rule_type: AnalysisPhoneticRuleType rule_type?: AnalysisPhoneticRuleType
} }
export interface AnalysisPorterStemTokenFilter extends AnalysisTokenFilterBase { export interface AnalysisPorterStemTokenFilter extends AnalysisTokenFilterBase {
@ -12670,6 +12670,11 @@ export interface IngestCsvProcessor extends IngestProcessorBase {
trim?: boolean trim?: boolean
} }
export interface IngestDatabaseConfiguration {
name: Name
maxmind: IngestMaxmind
}
export interface IngestDateIndexNameProcessor extends IngestProcessorBase { export interface IngestDateIndexNameProcessor extends IngestProcessorBase {
date_formats: string[] date_formats: string[]
date_rounding: string date_rounding: string
@ -12748,6 +12753,12 @@ export interface IngestGsubProcessor extends IngestProcessorBase {
target_field?: Field target_field?: Field
} }
export interface IngestHtmlStripProcessor extends IngestProcessorBase {
field: Field
ignore_missing?: boolean
target_field?: Field
}
export interface IngestInferenceConfig { export interface IngestInferenceConfig {
regression?: IngestInferenceConfigRegression regression?: IngestInferenceConfigRegression
classification?: IngestInferenceConfigClassification classification?: IngestInferenceConfigClassification
@ -12809,6 +12820,10 @@ export interface IngestLowercaseProcessor extends IngestProcessorBase {
target_field?: Field target_field?: Field
} }
export interface IngestMaxmind {
account_id: Id
}
export interface IngestPipeline { export interface IngestPipeline {
description?: string description?: string
on_failure?: IngestProcessorContainer[] on_failure?: IngestProcessorContainer[]
@ -12854,6 +12869,7 @@ export interface IngestProcessorContainer {
geoip?: IngestGeoIpProcessor geoip?: IngestGeoIpProcessor
grok?: IngestGrokProcessor grok?: IngestGrokProcessor
gsub?: IngestGsubProcessor gsub?: IngestGsubProcessor
html_strip?: IngestHtmlStripProcessor
inference?: IngestInferenceProcessor inference?: IngestInferenceProcessor
join?: IngestJoinProcessor join?: IngestJoinProcessor
json?: IngestJsonProcessor json?: IngestJsonProcessor
@ -12871,6 +12887,7 @@ export interface IngestProcessorContainer {
trim?: IngestTrimProcessor trim?: IngestTrimProcessor
uppercase?: IngestUppercaseProcessor uppercase?: IngestUppercaseProcessor
urldecode?: IngestUrlDecodeProcessor urldecode?: IngestUrlDecodeProcessor
uri_parts?: IngestUriPartsProcessor
user_agent?: IngestUserAgentProcessor user_agent?: IngestUserAgentProcessor
} }
@ -12941,6 +12958,14 @@ export interface IngestUppercaseProcessor extends IngestProcessorBase {
target_field?: Field target_field?: Field
} }
export interface IngestUriPartsProcessor extends IngestProcessorBase {
field: Field
ignore_missing?: boolean
keep_original?: boolean
remove_if_successful?: boolean
target_field?: Field
}
export interface IngestUrlDecodeProcessor extends IngestProcessorBase { export interface IngestUrlDecodeProcessor extends IngestProcessorBase {
field: Field field: Field
ignore_missing?: boolean ignore_missing?: boolean
@ -12957,6 +12982,14 @@ export interface IngestUserAgentProcessor extends IngestProcessorBase {
export type IngestUserAgentProperty = 'NAME' | 'MAJOR' | 'MINOR' | 'PATCH' | 'OS' | 'OS_NAME' | 'OS_MAJOR' | 'OS_MINOR' | 'DEVICE' | 'BUILD' export type IngestUserAgentProperty = 'NAME' | 'MAJOR' | 'MINOR' | 'PATCH' | 'OS' | 'OS_NAME' | 'OS_MAJOR' | 'OS_MINOR' | 'DEVICE' | 'BUILD'
export interface IngestDeleteGeoipDatabaseRequest extends RequestBase {
id: Ids
master_timeout?: Duration
timeout?: Duration
}
export type IngestDeleteGeoipDatabaseResponse = AcknowledgedResponseBase
export interface IngestDeletePipelineRequest extends RequestBase { export interface IngestDeletePipelineRequest extends RequestBase {
id: Id id: Id
master_timeout?: Duration master_timeout?: Duration
@ -12969,8 +13002,9 @@ export interface IngestGeoIpStatsGeoIpDownloadStatistics {
successful_downloads: integer successful_downloads: integer
failed_downloads: integer failed_downloads: integer
total_download_time: DurationValue<UnitMillis> total_download_time: DurationValue<UnitMillis>
database_count: integer databases_count: integer
skipped_updates: integer skipped_updates: integer
expired_databases: integer
} }
export interface IngestGeoIpStatsGeoIpNodeDatabaseName { export interface IngestGeoIpStatsGeoIpNodeDatabaseName {
@ -12990,6 +13024,22 @@ export interface IngestGeoIpStatsResponse {
nodes: Record<Id, IngestGeoIpStatsGeoIpNodeDatabases> nodes: Record<Id, IngestGeoIpStatsGeoIpNodeDatabases>
} }
export interface IngestGetGeoipDatabaseDatabaseConfigurationMetadata {
id: Id
version: long
modified_date_millis: EpochTime<UnitMillis>
database: IngestDatabaseConfiguration
}
export interface IngestGetGeoipDatabaseRequest extends RequestBase {
id?: Ids
master_timeout?: Duration
}
export interface IngestGetGeoipDatabaseResponse {
databases: IngestGetGeoipDatabaseDatabaseConfigurationMetadata[]
}
export interface IngestGetPipelineRequest extends RequestBase { export interface IngestGetPipelineRequest extends RequestBase {
id?: Id id?: Id
master_timeout?: Duration master_timeout?: Duration
@ -13005,6 +13055,19 @@ export interface IngestProcessorGrokResponse {
patterns: Record<string, string> patterns: Record<string, string>
} }
export interface IngestPutGeoipDatabaseRequest extends RequestBase {
id: Id
master_timeout?: Duration
timeout?: Duration
/** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */
body?: {
name: Name
maxmind: IngestMaxmind
}
}
export type IngestPutGeoipDatabaseResponse = AcknowledgedResponseBase
export interface IngestPutPipelineRequest extends RequestBase { export interface IngestPutPipelineRequest extends RequestBase {
id: Id id: Id
master_timeout?: Duration master_timeout?: Duration
@ -13047,10 +13110,12 @@ export interface IngestSimulateIngest {
export interface IngestSimulatePipelineSimulation { export interface IngestSimulatePipelineSimulation {
doc?: IngestSimulateDocumentSimulation doc?: IngestSimulateDocumentSimulation
processor_results?: IngestSimulatePipelineSimulation[]
tag?: string tag?: string
processor_type?: string processor_type?: string
status?: WatcherActionStatusOptions status?: WatcherActionStatusOptions
description?: string
ignored_error?: ErrorCause
error?: ErrorCause
} }
export interface IngestSimulateRequest extends RequestBase { export interface IngestSimulateRequest extends RequestBase {
@ -13058,13 +13123,19 @@ export interface IngestSimulateRequest extends RequestBase {
verbose?: boolean verbose?: boolean
/** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */
body?: { body?: {
docs?: IngestSimulateDocument[] docs: IngestSimulateDocument[]
pipeline?: IngestPipeline pipeline?: IngestPipeline
} }
} }
export interface IngestSimulateResponse { export interface IngestSimulateResponse {
docs: IngestSimulatePipelineSimulation[] docs: IngestSimulateSimulateDocumentResult[]
}
export interface IngestSimulateSimulateDocumentResult {
doc?: IngestSimulateDocumentSimulation
error?: ErrorCause
processor_results?: IngestSimulatePipelineSimulation[]
} }
export interface LicenseLicense { export interface LicenseLicense {