Skip to main content

Class: Ollama

Unified language model interface

Hierarchy

Implements

Constructors

constructor

new Ollama(init)

Parameters

Name | Type
init | Partial<Ollama> & { model: string }

Overrides

BaseEmbedding.constructor

Defined in

packages/core/src/llm/ollama.ts:40

Properties

additionalChatOptions

Optional additionalChatOptions: Record<string, unknown>

Defined in

packages/core/src/llm/ollama.ts:37


baseURL

baseURL: string = "http://127.0.0.1:11434"

Defined in

packages/core/src/llm/ollama.ts:32


callbackManager

Optional callbackManager: CallbackManager

Defined in

packages/core/src/llm/ollama.ts:38


contextWindow

contextWindow: number = 4096

Defined in

packages/core/src/llm/ollama.ts:35


hasStreaming

Readonly hasStreaming: true

Defined in

packages/core/src/llm/ollama.ts:28


model

model: string

Defined in

packages/core/src/llm/ollama.ts:31


requestTimeout

requestTimeout: number

Defined in

packages/core/src/llm/ollama.ts:36


temperature

temperature: number = 0.7

Defined in

packages/core/src/llm/ollama.ts:33


topP

topP: number = 0.9

Defined in

packages/core/src/llm/ollama.ts:34

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Implementation of

LLM.metadata

Defined in

packages/core/src/llm/ollama.ts:51

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

Name | Type
params | LLMChatParamsStreaming

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Implementation of

LLM.chat

Defined in

packages/core/src/llm/ollama.ts:62

chat(params): Promise<ChatResponse>

Parameters

Name | Type
params | LLMChatParamsNonStreaming

Returns

Promise<ChatResponse>

Implementation of

LLM.chat

Defined in

packages/core/src/llm/ollama.ts:65


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

Name | Type
params | LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Implementation of

LLM.complete

Defined in

packages/core/src/llm/ollama.ts:138

complete(params): Promise<CompletionResponse>

Parameters

Name | Type
params | LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Implementation of

LLM.complete

Defined in

packages/core/src/llm/ollama.ts:141


getEmbedding

Private getEmbedding(prompt): Promise<number[]>

Parameters

Name | Type
prompt | string

Returns

Promise<number[]>

Defined in

packages/core/src/llm/ollama.ts:185


getQueryEmbedding

getQueryEmbedding(query): Promise<number[]>

Parameters

Name | Type
query | string

Returns

Promise<number[]>

Overrides

BaseEmbedding.getQueryEmbedding

Defined in

packages/core/src/llm/ollama.ts:212


getTextEmbedding

getTextEmbedding(text): Promise<number[]>

Parameters

Name | Type
text | string

Returns

Promise<number[]>

Overrides

BaseEmbedding.getTextEmbedding

Defined in

packages/core/src/llm/ollama.ts:208


similarity

similarity(embedding1, embedding2, mode?): number

Parameters

Name | Type | Default value
embedding1 | number[] | undefined
embedding2 | number[] | undefined
mode | SimilarityType | SimilarityType.DEFAULT

Returns

number

Inherited from

BaseEmbedding.similarity

Defined in

packages/core/src/embeddings/types.ts:14


streamChat

Private streamChat<T>(stream, accessor, parentEvent?): AsyncIterable<T>

Type parameters

Name
T

Parameters

Name | Type
stream | ReadableStream<Uint8Array>
accessor | (data: any) => T
parentEvent? | Event

Returns

AsyncIterable<T>

Defined in

packages/core/src/llm/ollama.ts:110


tokens

tokens(messages): number

Calculates the number of tokens needed for the given chat messages

Parameters

Name | Type
messages | ChatMessage[]

Returns

number

Implementation of

LLM.tokens

Defined in

packages/core/src/llm/ollama.ts:181