HuggingFaceLLM
Defined in: providers/huggingface/src/llm.ts:31
Extends
BaseLLM
Constructors
new HuggingFaceLLM()
new HuggingFaceLLM(init?): HuggingFaceLLM
Defined in: providers/huggingface/src/llm.ts:42
Parameters
init?
Returns
HuggingFaceLLM
Overrides
BaseLLM.constructor
Properties
modelName
modelName: string
Defined in: providers/huggingface/src/llm.ts:32
tokenizerName
tokenizerName: string
Defined in: providers/huggingface/src/llm.ts:33
temperature
temperature: number
Defined in: providers/huggingface/src/llm.ts:34
topP
topP: number
Defined in: providers/huggingface/src/llm.ts:35
maxTokens?
optional maxTokens: number
Defined in: providers/huggingface/src/llm.ts:36
contextWindow
contextWindow: number
Defined in: providers/huggingface/src/llm.ts:37
Accessors
metadata
Get Signature
get metadata(): LLMMetadata
Defined in: providers/huggingface/src/llm.ts:52
Returns
LLMMetadata
Overrides
BaseLLM.metadata
Methods
getTokenizer()
getTokenizer(): Promise<PreTrainedTokenizer>
Defined in: providers/huggingface/src/llm.ts:63
Returns
Promise<PreTrainedTokenizer>
getModel()
getModel(): Promise<PreTrainedModel>
Defined in: providers/huggingface/src/llm.ts:79
Returns
Promise<PreTrainedModel>
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>
Defined in: providers/huggingface/src/llm.ts:95
Parameters
params
LLMChatParamsStreaming<object, object>
Returns
Promise<AsyncIterable<ChatResponseChunk, any, any>>
Overrides
BaseLLM.chat
Call Signature
chat(params): Promise<ChatResponse<object>>
Defined in: providers/huggingface/src/llm.ts:98
Parameters
params
LLMChatParamsNonStreaming<object, object>
Returns
Promise<ChatResponse<object>>
Overrides
BaseLLM.chat
nonStreamChat()
protected nonStreamChat(params): Promise<ChatResponse<object>>
Defined in: providers/huggingface/src/llm.ts:107
Parameters
params
LLMChatParamsNonStreaming<object, object>
Returns
Promise<ChatResponse<object>>
streamChat()
protected streamChat(params): Promise<never>
Defined in: providers/huggingface/src/llm.ts:142
Parameters
params
LLMChatParamsStreaming<object, object>
Returns
Promise<never>