Class: Anthropic

Anthropic LLM implementation

Hierarchy

BaseLLM
  ↳ Anthropic

Constructors

constructor

new Anthropic(init?)

Parameters

Name   Type
init?  Partial<Anthropic>

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/LLM.ts:718
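
Example

A minimal construction sketch. The import path ("llamaindex") and the ANTHROPIC_API_KEY environment variable are assumptions; every init field is optional and falls back to the class defaults listed under Properties.

```ts
import { Anthropic } from "llamaindex"; // assumed export location

// init is Partial<Anthropic>: pass only the fields you want to override.
const llm = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY, // assumed env variable
  model: "claude-2",
  temperature: 0.1,
  maxTokens: 512,
});
```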

Properties

apiKey

Optional apiKey: string = undefined

Defined in

packages/core/src/llm/LLM.ts:711


callbackManager

Optional callbackManager: CallbackManager

Defined in

packages/core/src/llm/LLM.ts:716


maxRetries

maxRetries: number

Defined in

packages/core/src/llm/LLM.ts:712


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/LLM.ts:708


model

model: "claude-2" | "claude-instant-1"

Defined in

packages/core/src/llm/LLM.ts:705


session

session: AnthropicSession

Defined in

packages/core/src/llm/LLM.ts:714


temperature

temperature: number

Defined in

packages/core/src/llm/LLM.ts:706


timeout

Optional timeout: number

Defined in

packages/core/src/llm/LLM.ts:713


topP

topP: number

Defined in

packages/core/src/llm/LLM.ts:707

Accessors

metadata

get metadata(): Object

Returns

Object

Name           Type
contextWindow  number
maxTokens      undefined | number
model          "claude-2" | "claude-instant-1"
temperature    number
tokenizer      undefined
topP           number

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/LLM.ts:743
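
Example

A short sketch reading the getter (reusing the llm instance from the constructor example; metadata is an accessor, so no call parentheses):

```ts
const { model, contextWindow, temperature, topP } = llm.metadata;
console.log(`${model}: context window ${contextWindow}, temperature ${temperature}, topP ${topP}`);
```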

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

Name    Type
params  LLMChatParamsStreaming

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/LLM.ts:771

chat(params): Promise<ChatResponse>

Parameters

Name    Type
params  LLMChatParamsNonStreaming

Returns

Promise<ChatResponse>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/LLM.ts:774
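
Example

A usage sketch covering both overloads. The stream: true discriminant and the field names on the message and chunk objects (content, delta) are assumptions based on the ChatMessage and ChatResponseChunk types referenced above:

```ts
// Non-streaming: resolves to a single ChatResponse.
const response = await llm.chat({
  messages: [{ role: "user", content: "Hello, Claude!" }],
});
console.log(response.message.content);

// Streaming: set stream: true and iterate the ChatResponseChunk values.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Hello, Claude!" }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```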


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

Name    Type
params  LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/LLM.ts:138

complete(params): Promise<CompletionResponse>

Parameters

Name    Type
params  LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/LLM.ts:141
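
Example

A parallel sketch for the inherited completion overloads, assuming CompletionResponse exposes a text field:

```ts
// Non-streaming completion.
const completion = await llm.complete({ prompt: "Write a haiku about retrieval." });
console.log(completion.text);

// Streaming completion: each yielded CompletionResponse carries a text delta.
const completionStream = await llm.complete({
  prompt: "Write a haiku about retrieval.",
  stream: true,
});
for await (const part of completionStream) {
  process.stdout.write(part.text);
}
```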


mapMessagesToPrompt

mapMessagesToPrompt(messages): string

Parameters

Name      Type
messages  ChatMessage[]

Returns

string

Defined in

packages/core/src/llm/LLM.ts:754
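
Example

A sketch of flattening a chat history into one prompt string. That the output follows Anthropic's legacy "\n\nHuman: ... \n\nAssistant:" convention is an assumption, not confirmed by this page:

```ts
const prompt = llm.mapMessagesToPrompt([
  { role: "system", content: "You are a concise assistant." },
  { role: "user", content: "What is LlamaIndex?" },
]);
console.log(prompt); // e.g. "...\n\nHuman: What is LlamaIndex?\n\nAssistant:" (assumed shape)
```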


streamChat

Protected streamChat(messages, parentEvent?): AsyncIterable<ChatResponseChunk>

Parameters

Name          Type
messages      ChatMessage[]
parentEvent?  Event

Returns

AsyncIterable<ChatResponseChunk>

Defined in

packages/core/src/llm/LLM.ts:800
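
Example

Because streamChat is protected, it is only reachable from a subclass. A hypothetical subclass sketch (LoggingAnthropic, the ChatMessage import location, and the chunk's delta field are illustrative assumptions):

```ts
import { Anthropic, ChatMessage } from "llamaindex"; // assumed exports

class LoggingAnthropic extends Anthropic {
  // Wraps the protected streamChat to log each chunk as it arrives.
  async *loggedStream(messages: ChatMessage[]) {
    for await (const chunk of this.streamChat(messages)) {
      console.debug("chunk:", chunk.delta); // delta field is an assumption
      yield chunk;
    }
  }
}
```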


tokens

tokens(messages): number

Calculates the number of tokens needed for the given chat messages

Parameters

Name      Type
messages  ChatMessage[]

Returns

number

Overrides

BaseLLM.tokens

Defined in

packages/core/src/llm/LLM.ts:739
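
Example

The call shape per the signature above; whether this override returns an exact or approximate count is not specified on this page:

```ts
const tokenCount = llm.tokens([
  { role: "user", content: "How many tokens does this message need?" },
]);
console.log(`Prompt size: ${tokenCount} tokens`);
```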