
Class: Portkey

Unified language model interface

Hierarchy

BaseLLM

  ↳ Portkey

Constructors

constructor

new Portkey(init?)

Parameters

Name   Type
init?  Partial<Portkey>

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/LLM.ts:834
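
Example

A minimal sketch of constructing a Portkey instance. The import path, the "single" mode value, and the provider, virtual key, and model entries below are placeholders; the exact LLMOptions shape comes from the portkey-ai package and is not documented on this page.

```typescript
import { Portkey } from "llamaindex";

const portkey = new Portkey({
  apiKey: process.env.PORTKEY_API_KEY, // assumption: API key supplied via environment variable
  mode: "single",                      // assumption: a Portkey routing mode
  llms: [
    {
      provider: "openai",        // placeholder provider name
      virtual_key: "openai-key", // placeholder virtual key
      model: "gpt-4",            // placeholder model id
    } as any,                    // cast: LLMOptions is defined by portkey-ai
  ],
});
```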

Properties

apiKey

Optional apiKey: string = undefined

Defined in

packages/core/src/llm/LLM.ts:827


baseURL

Optional baseURL: string = undefined

Defined in

packages/core/src/llm/LLM.ts:828


callbackManager

Optional callbackManager: CallbackManager

Defined in

packages/core/src/llm/LLM.ts:832


llms

Optional llms: null | [LLMOptions] = undefined

Defined in

packages/core/src/llm/LLM.ts:830


mode

Optional mode: string = undefined

Defined in

packages/core/src/llm/LLM.ts:829


session

session: PortkeySession

Defined in

packages/core/src/llm/LLM.ts:831

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/LLM.ts:853
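
Example

The getter can be read like any property. A sketch, assuming LLMMetadata carries fields such as model and contextWindow:

```typescript
const meta = portkey.metadata;
console.log(meta.model, meta.contextWindow); // assumption: LLMMetadata exposes these fields
```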

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

Name    Type
params  LLMChatParamsStreaming

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/LLM.ts:857

chat(params): Promise<ChatResponse>

Parameters

Name    Type
params  LLMChatParamsNonStreaming

Returns

Promise<ChatResponse>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/LLM.ts:860
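
Example

The overload is selected by the stream flag on the params object. A sketch, assuming ChatMessage values with role and content, and that streamed chunks expose a delta string:

```typescript
// Non-streaming: resolves to a single ChatResponse.
const response = await portkey.chat({
  messages: [{ role: "user", content: "Hello" }],
});
console.log(response.message.content);

// Streaming: resolves to an AsyncIterable of ChatResponseChunk.
const stream = await portkey.chat({
  messages: [{ role: "user", content: "Hello" }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta); // assumption: each chunk carries a delta string
}
```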


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

Name    Type
params  LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/LLM.ts:138

complete(params): Promise<CompletionResponse>

Parameters

Name    Type
params  LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/LLM.ts:141
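
Example

complete is inherited from BaseLLM and takes a prompt rather than a message list; the overload is again selected by the stream flag. A sketch, assuming CompletionResponse exposes a text field:

```typescript
// Non-streaming completion.
const completion = await portkey.complete({ prompt: "Say hi" });
console.log(completion.text);

// Streaming completion.
const completionStream = await portkey.complete({ prompt: "Say hi", stream: true });
for await (const chunk of completionStream) {
  process.stdout.write(chunk.text); // assumption: each streamed chunk also carries text
}
```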


streamChat

streamChat(messages, parentEvent?, params?): AsyncIterable<ChatResponseChunk>

Parameters

Name          Type
messages      ChatMessage[]
parentEvent?  Event
params?       Record<string, any>

Returns

AsyncIterable<ChatResponseChunk>

Defined in

packages/core/src/llm/LLM.ts:880
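
Example

Unlike chat, streamChat returns the async iterable directly rather than wrapping it in a Promise. A sketch:

```typescript
const chunks = portkey.streamChat([{ role: "user", content: "Hello" }]);
for await (const chunk of chunks) {
  process.stdout.write(chunk.delta); // assumption: each chunk carries a delta string
}
```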


tokens

tokens(messages): number

Calculates the number of tokens needed for the given chat messages

Parameters

Name      Type
messages  ChatMessage[]

Returns

number

Overrides

BaseLLM.tokens

Defined in

packages/core/src/llm/LLM.ts:849
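
Example

A sketch of the call shape; whether Portkey returns a real token count or defers to the base implementation is not shown on this page:

```typescript
const count = portkey.tokens([
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: "Hello" },
]);
console.log(`prompt uses ${count} tokens`);
```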