Skip to main content

Class: Anthropic

Unified language model interface

Extends

ToolCallLLM<AnthropicAdditionalChatOptions>

Constructors

new Anthropic()

new Anthropic(init?): Anthropic

Parameters

init?: Partial<Anthropic>

Returns

Anthropic

Overrides

ToolCallLLM.constructor

Defined in

packages/llm/anthropic/dist/index.d.ts:75

Properties

apiKey?

optional apiKey: string

Defined in

packages/llm/anthropic/dist/index.d.ts:71


getModelName()

getModelName: (model) => string

Parameters

model: string

Returns

string

Defined in

packages/llm/anthropic/dist/index.d.ts:85


maxRetries

maxRetries: number

Defined in

packages/llm/anthropic/dist/index.d.ts:72


maxTokens?

optional maxTokens: number

Defined in

packages/llm/anthropic/dist/index.d.ts:70


model

model: "claude-3-5-sonnet" | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

Defined in

packages/llm/anthropic/dist/index.d.ts:67


session

session: AnthropicSession

Defined in

packages/llm/anthropic/dist/index.d.ts:74


temperature

temperature: number

Defined in

packages/llm/anthropic/dist/index.d.ts:68


timeout?

optional timeout: number

Defined in

packages/llm/anthropic/dist/index.d.ts:73


topP

topP: number

Defined in

packages/llm/anthropic/dist/index.d.ts:69

Accessors

metadata

get metadata(): object

Returns

object

contextWindow

contextWindow: number

maxTokens

maxTokens: undefined | number

model

model: "claude-3-5-sonnet" | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

temperature

temperature: number

tokenizer

tokenizer: undefined

topP

topP: number

Overrides

ToolCallLLM.metadata

Defined in

packages/llm/anthropic/dist/index.d.ts:77


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Overrides

ToolCallLLM.supportToolCall

Defined in

packages/llm/anthropic/dist/index.d.ts:76

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>

Overrides

ToolCallLLM.chat

Defined in

packages/llm/anthropic/dist/index.d.ts:87

chat(params)

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsNonStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat

Defined in

packages/llm/anthropic/dist/index.d.ts:88


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse, any, any>>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/llms/dist/index.d.ts:168

complete(params)

complete(params): Promise<CompletionResponse>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/llms/dist/index.d.ts:169


formatMessages()

formatMessages(messages): MessageParam[]

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

Returns

MessageParam[]

Defined in

packages/llm/anthropic/dist/index.d.ts:86


streamChat()

protected streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

systemPrompt?: null | string

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>

Defined in

packages/llm/anthropic/dist/index.d.ts:89


toTool()

static toTool(tool): Tool

Parameters

tool: BaseTool<any>

Returns

Tool

Defined in

packages/llm/anthropic/dist/index.d.ts:90