Skip to main content

Class: OllamaEmbedding

OllamaEmbedding is an alias for Ollama that implements the BaseEmbedding interface.

Extends

Implements

Constructors

new OllamaEmbedding()

new OllamaEmbedding(params): OllamaEmbedding

Parameters

params: OllamaParams

Returns

OllamaEmbedding

Inherited from

Ollama.constructor

Source

packages/core/src/llm/ollama.ts:75

Properties

embedBatchSize

embedBatchSize: number = DEFAULT_EMBED_BATCH_SIZE

Implementation of

BaseEmbedding.embedBatchSize

Inherited from

Ollama.embedBatchSize

Source

packages/core/src/embeddings/types.ts:11


hasStreaming

readonly hasStreaming: true = true

Inherited from

Ollama.hasStreaming

Source

packages/core/src/llm/ollama.ts:61


model

model: string

Inherited from

Ollama.model

Source

packages/core/src/llm/ollama.ts:66


ollama

ollama: Ollama

Inherited from

Ollama.ollama

Source

packages/core/src/llm/ollama.ts:63


options

options: Partial<Omit<Options, "temperature" | "top_p" | "num_ctx">> & Pick<Options, "temperature" | "top_p" | "num_ctx">

Inherited from

Ollama.options

Source

packages/core/src/llm/ollama.ts:68

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Source

packages/core/src/llm/ollama.ts:87

Methods

abort()

abort(): void

Returns

void

Inherited from

Ollama.abort

Source

packages/core/src/llm/ollama.ts:209


chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<object, object>

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Inherited from

Ollama.chat

Source

packages/core/src/llm/ollama.ts:99

chat(params)

chat(params): Promise<ChatResponse<object>>

Parameters

params: LLMChatParamsNonStreaming<object, object>

Returns

Promise<ChatResponse<object>>

Inherited from

Ollama.chat

Source

packages/core/src/llm/ollama.ts:102


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

Ollama.complete

Source

packages/core/src/llm/ollama.ts:140

complete(params)

complete(params): Promise<CompletionResponse>

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

Ollama.complete

Source

packages/core/src/llm/ollama.ts:143


copy()

copy(request): Promise<StatusResponse>

Parameters

request: CopyRequest

Returns

Promise<StatusResponse>

Inherited from

Ollama.copy

Source

packages/core/src/llm/ollama.ts:245


create()

create(request)

create(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

request: CreateRequest & object

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Inherited from

Ollama.create

Source

packages/core/src/llm/ollama.ts:224

create(request)

create(request): Promise<ProgressResponse>

Parameters

request: CreateRequest & object

Returns

Promise<ProgressResponse>

Inherited from

Ollama.create

Source

packages/core/src/llm/ollama.ts:227


delete()

delete(request): Promise<StatusResponse>

Parameters

request: DeleteRequest

Returns

Promise<StatusResponse>

Inherited from

Ollama.delete

Source

packages/core/src/llm/ollama.ts:242


embeddings()

embeddings(request): Promise<EmbeddingsResponse>

Parameters

request: EmbeddingsRequest

Returns

Promise<EmbeddingsResponse>

Inherited from

Ollama.embeddings

Source

packages/core/src/llm/ollama.ts:254


encodeImage()

encodeImage(image): Promise<string>

Parameters

image: string | Uint8Array

Returns

Promise<string>

Inherited from

Ollama.encodeImage

Source

packages/core/src/llm/ollama.ts:212


generate()

generate(request)

generate(request): Promise<AsyncGenerator<GenerateResponse, any, unknown>>

Parameters

request: GenerateRequest & object

Returns

Promise<AsyncGenerator<GenerateResponse, any, unknown>>

Inherited from

Ollama.generate

Source

packages/core/src/llm/ollama.ts:215

generate(request)

generate(request): Promise<GenerateResponse>

Parameters

request: GenerateRequest & object

Returns

Promise<GenerateResponse>

Inherited from

Ollama.generate

Source

packages/core/src/llm/ollama.ts:218


getQueryEmbedding()

getQueryEmbedding(query): Promise<number[]>

Parameters

query: string

Returns

Promise<number[]>

Implementation of

BaseEmbedding.getQueryEmbedding

Inherited from

Ollama.getQueryEmbedding

Source

packages/core/src/llm/ollama.ts:194


getTextEmbedding()

getTextEmbedding(text): Promise<number[]>

Parameters

text: string

Returns

Promise<number[]>

Implementation of

BaseEmbedding.getTextEmbedding

Inherited from

Ollama.getTextEmbedding

Source

packages/core/src/llm/ollama.ts:190


getTextEmbeddings()

getTextEmbeddings(texts): Promise<number[][]>

Optionally override this method to retrieve multiple embeddings in a single request

Parameters

texts: string[]

Returns

Promise<number[][]>

Implementation of

BaseEmbedding.getTextEmbeddings

Inherited from

Ollama.getTextEmbeddings

Source

packages/core/src/embeddings/types.ts:28


getTextEmbeddingsBatch()

getTextEmbeddingsBatch(texts, options?): Promise<number[][]>

Get embeddings for a batch of texts

Parameters

texts: string[]

options?

options.logProgress?: boolean

Returns

Promise<number[][]>

Implementation of

BaseEmbedding.getTextEmbeddingsBatch

Inherited from

Ollama.getTextEmbeddingsBatch

Source

packages/core/src/embeddings/types.ts:44


list()

list(): Promise<ListResponse>

Returns

Promise<ListResponse>

Inherited from

Ollama.list

Source

packages/core/src/llm/ollama.ts:248


pull()

pull(request)

pull(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

request: PullRequest & object

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Inherited from

Ollama.pull

Source

packages/core/src/llm/ollama.ts:233

pull(request)

pull(request): Promise<ProgressResponse>

Parameters

request: PullRequest & object

Returns

Promise<ProgressResponse>

Inherited from

Ollama.pull

Source

packages/core/src/llm/ollama.ts:236


push()

push(request)

push(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

request: PushRequest & object

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Inherited from

Ollama.push

Source

packages/core/src/llm/ollama.ts:200

push(request)

push(request): Promise<ProgressResponse>

Parameters

request: PushRequest & object

Returns

Promise<ProgressResponse>

Inherited from

Ollama.push

Source

packages/core/src/llm/ollama.ts:203


show()

show(request): Promise<ShowResponse>

Parameters

request: ShowRequest

Returns

Promise<ShowResponse>

Inherited from

Ollama.show

Source

packages/core/src/llm/ollama.ts:251


similarity()

similarity(embedding1, embedding2, mode): number

Parameters

embedding1: number[]

embedding2: number[]

mode: SimilarityType = SimilarityType.DEFAULT

Returns

number

Implementation of

BaseEmbedding.similarity

Inherited from

Ollama.similarity

Source

packages/core/src/embeddings/types.ts:13


transform()

transform(nodes, _options?): Promise<BaseNode<Metadata>[]>

Parameters

nodes: BaseNode<Metadata>[]

_options?: any

Returns

Promise<BaseNode<Metadata>[]>

Implementation of

BaseEmbedding.transform

Inherited from

Ollama.transform

Source

packages/core/src/embeddings/types.ts:58