[[reference-inference]]
== client.inference

////////
This file is autogenerated, DO NOT send pull requests that change this file directly.
You should update the script that does the generation, which can be found in:
https://github.com/elastic/elastic-client-generator-js

You can run the script with the following command:
npm run elasticsearch -- --version
////////

[discrete]
[[client.inference.delete]]
== `client.inference.delete()`

Delete an inference endpoint

{ref}/delete-inference-api.html[{es} documentation]

[discrete]
=== Function signature

[source,ts]
----
(request: InferenceDeleteRequest, options?: TransportRequestOptions) => Promise<InferenceDeleteResponse>
----

[discrete]
=== Request

[source,ts,subs=+macros]
----
interface InferenceDeleteRequest extends <> {
  task_type?: <>
  inference_id: <>
  dry_run?: boolean
  force?: boolean
}
----

[discrete]
=== Response

[source,ts,subs=+macros]
----
type InferenceDeleteResponse = <>
----

[discrete]
[[client.inference.get]]
== `client.inference.get()`

Get an inference endpoint

{ref}/get-inference-api.html[{es} documentation]

[discrete]
=== Function signature

[source,ts]
----
(request: InferenceGetRequest, options?: TransportRequestOptions) => Promise<InferenceGetResponse>
----

[discrete]
=== Request

[source,ts,subs=+macros]
----
interface InferenceGetRequest extends <> {
  task_type?: <>
  inference_id?: <>
}
----

[discrete]
=== Response

[source,ts,subs=+macros]
----
interface InferenceGetResponse {
  endpoints: <>[]
}
----

[discrete]
[[client.inference.inference]]
== `client.inference.inference()`

Perform inference on the service

{ref}/post-inference-api.html[{es} documentation]

[discrete]
=== Function signature

[source,ts]
----
(request: InferenceInferenceRequest, options?: TransportRequestOptions) => Promise<InferenceInferenceResponse>
----

[discrete]
=== Request

[source,ts,subs=+macros]
----
interface InferenceInferenceRequest extends <> {
  task_type?: <>
  inference_id: <>
  timeout?: <>
  query?: string
  input: string | string[]
  task_settings?: <>
}
----

[discrete]
=== Response

[source,ts,subs=+macros]
----
type InferenceInferenceResponse = <>
----

[discrete]
[[client.inference.put]]
== `client.inference.put()`

Create an inference endpoint

{ref}/put-inference-api.html[{es} documentation]

[discrete]
=== Function signature

[source,ts]
----
(request: InferencePutRequest, options?: TransportRequestOptions) => Promise<InferencePutResponse>
----

[discrete]
=== Request

[source,ts,subs=+macros]
----
interface InferencePutRequest extends <> {
  task_type?: <>
  inference_id: <>
  inference_config?: <>
}
----

[discrete]
=== Response

[source,ts,subs=+macros]
----
type InferencePutResponse = <>
----

[discrete]
[[client.inference.streamInference]]
== `client.inference.streamInference()`

Perform streaming inference

[discrete]
=== Function signature

[source,ts]
----
(request: InferenceStreamInferenceRequest, options?: TransportRequestOptions) => Promise
----
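
[discrete]
[[client.inference.usage-example]]
== Usage example

For orientation, the following is a minimal sketch that strings the endpoints above together: create an inference endpoint, run inference against it, inspect it, and delete it. The cluster address, credentials, endpoint id (`my-inference-endpoint`), and the service settings shown are illustrative placeholders, not values taken from this reference; substitute your own.

[source,ts]
----
import { Client } from '@elastic/elasticsearch'

// Connection details are placeholders; point this at your own cluster.
const client = new Client({
  node: 'http://localhost:9200',
  auth: { apiKey: 'your-api-key' }
})

async function run () {
  // Create an inference endpoint. The `elser` service and its settings here
  // are illustrative; other services follow the same request shape.
  await client.inference.put({
    task_type: 'sparse_embedding',
    inference_id: 'my-inference-endpoint',
    inference_config: {
      service: 'elser',
      service_settings: { num_allocations: 1, num_threads: 1 }
    }
  })

  // Perform inference against the endpoint just created.
  const result = await client.inference.inference({
    inference_id: 'my-inference-endpoint',
    input: 'The quick brown fox jumps over the lazy dog'
  })
  console.log(result)

  // Inspect the endpoint, then remove it.
  const { endpoints } = await client.inference.get({
    inference_id: 'my-inference-endpoint'
  })
  console.log(endpoints)

  await client.inference.delete({ inference_id: 'my-inference-endpoint' })
}

run().catch(console.log)
----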