Skip to main content

Class: abstract ToolCallLLM<AdditionalChatOptions>

Unified language model interface; this abstract subclass additionally declares tool-calling support via `supportToolCall`.

Extends

BaseLLM<AdditionalChatOptions, ToolCallLLMMessageOptions>

Extended by

Type Parameters

AdditionalChatOptions extends object = object

Constructors

new ToolCallLLM()

new ToolCallLLM<AdditionalChatOptions>(): ToolCallLLM<AdditionalChatOptions>

Returns

ToolCallLLM<AdditionalChatOptions>

Inherited from

BaseLLM.constructor

Properties

metadata

abstract metadata: LLMMetadata

Inherited from

BaseLLM.metadata

Defined in

packages/core/llms/dist/index.d.ts:162


supportToolCall

abstract supportToolCall: boolean

Defined in

packages/core/llms/dist/index.d.ts:169

Methods

chat()

chat(params)

abstract chat(params): Promise<AsyncIterable<ChatResponseChunk<object>, any, any>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<AdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<object>, any, any>>

Inherited from

BaseLLM.chat

Defined in

packages/core/llms/dist/index.d.ts:165

chat(params)

abstract chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Parameters

params: LLMChatParamsNonStreaming<AdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Inherited from

BaseLLM.chat

Defined in

packages/core/llms/dist/index.d.ts:166


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse, any, any>>

Inherited from

BaseLLM.complete

Defined in

packages/core/llms/dist/index.d.ts:163

complete(params)

complete(params): Promise<CompletionResponse>

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/llms/dist/index.d.ts:164