Skip to main content

Class: Anthropic

Extends

ToolCallLLM

Constructors

new Anthropic()

new Anthropic(init?): Anthropic

Parameters

init?: Partial<Anthropic>

Returns

Anthropic

Overrides

ToolCallLLM.constructor

Source

packages/llamaindex/src/llm/anthropic.ts:115

Properties

apiKey?

optional apiKey: string = undefined

Source

packages/llamaindex/src/llm/anthropic.ts:110


maxRetries

maxRetries: number

Source

packages/llamaindex/src/llm/anthropic.ts:111


maxTokens?

optional maxTokens: number

Source

packages/llamaindex/src/llm/anthropic.ts:107


model

model: "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

Source

packages/llamaindex/src/llm/anthropic.ts:104


session

session: AnthropicSession

Source

packages/llamaindex/src/llm/anthropic.ts:113


temperature

temperature: number

Source

packages/llamaindex/src/llm/anthropic.ts:105


timeout?

optional timeout: number

Source

packages/llamaindex/src/llm/anthropic.ts:112


topP

topP: number

Source

packages/llamaindex/src/llm/anthropic.ts:106

Accessors

metadata

get metadata(): object

Returns

object

contextWindow

contextWindow: number

maxTokens

maxTokens: undefined | number

model

model: "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

temperature

temperature: number

tokenizer

tokenizer: undefined = undefined

topP

topP: number

Source

packages/llamaindex/src/llm/anthropic.ts:138


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Source

packages/llamaindex/src/llm/anthropic.ts:134

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Parameters

params: LLMChatParamsStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Overrides

ToolCallLLM.chat

Source

packages/llamaindex/src/llm/anthropic.ts:262

chat(params)

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Parameters

params: LLMChatParamsNonStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat

Source

packages/llamaindex/src/llm/anthropic.ts:268


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

ToolCallLLM.complete

Source

packages/llamaindex/src/llm/base.ts:22

complete(params)

complete(params): Promise<CompletionResponse>

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

ToolCallLLM.complete

Source

packages/llamaindex/src/llm/base.ts:25


formatMessages()

formatMessages<Beta>(messages): Beta extends true ? ToolsBetaMessageParam[] : MessageParam[]

Type parameters

Beta = false

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

Returns

Beta extends true ? ToolsBetaMessageParam[] : MessageParam[]

Source

packages/llamaindex/src/llm/anthropic.ts:156


getModelName()

getModelName(model): string

Parameters

model: string

Returns

string

Source

packages/llamaindex/src/llm/anthropic.ts:149


streamChat()

protected streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

systemPrompt?: null | string

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Source

packages/llamaindex/src/llm/anthropic.ts:379


toTool()

static toTool(tool): Tool

Parameters

tool: BaseTool<any>

Returns

Tool

Source

packages/llamaindex/src/llm/anthropic.ts:410