ReplicateLLM
Defined in: providers/replicate/src/llm.ts:107
Replicate LLM implementation.
Extends
BaseLLM
Constructors
new ReplicateLLM()
new ReplicateLLM(init?): ReplicateLLM
Defined in: providers/replicate/src/llm.ts:115
Parameters
init?
Partial<ReplicateLLM> & object
Returns
ReplicateLLM
Overrides
BaseLLM.constructor
Properties
model
model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"
Defined in: providers/replicate/src/llm.ts:108
chatStrategy
chatStrategy: ReplicateChatStrategy
Defined in: providers/replicate/src/llm.ts:109
temperature
temperature: number
Defined in: providers/replicate/src/llm.ts:110
topP
topP: number
Defined in: providers/replicate/src/llm.ts:111
maxTokens?
optional
maxTokens: number
Defined in: providers/replicate/src/llm.ts:112
replicateSession
replicateSession: ReplicateSession
Defined in: providers/replicate/src/llm.ts:113
Accessors
metadata
Get Signature
get metadata(): object
Defined in: providers/replicate/src/llm.ts:140
Returns
object
model
model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"
temperature
temperature: number
topP
topP: number
maxTokens
maxTokens: undefined | number
contextWindow
contextWindow: number
tokenizer
tokenizer: undefined = undefined
Overrides
BaseLLM.metadata
Methods
mapMessagesToPrompt()
mapMessagesToPrompt(messages): object
Defined in: providers/replicate/src/llm.ts:151
Parameters
messages
ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined | MessageContent
mapMessagesToPromptLlama3()
mapMessagesToPromptLlama3(messages): object
Defined in: providers/replicate/src/llm.ts:177
Parameters
messages
ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined = undefined
mapMessagesToPromptA16Z()
mapMessagesToPromptA16Z(messages): object
Defined in: providers/replicate/src/llm.ts:203
Parameters
messages
ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined = undefined
mapMessageTypeA16Z()
mapMessageTypeA16Z(messageType): string
Defined in: providers/replicate/src/llm.ts:217
Parameters
messageType
MessageType
Returns
string
mapMessagesToPromptMeta()
mapMessagesToPromptMeta(messages, opts?): object
Defined in: providers/replicate/src/llm.ts:230
Parameters
messages
ChatMessage[]
opts?
withBos?
boolean
replicate4Bit?
boolean
withNewlines?
boolean
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined | MessageContent
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>
Defined in: providers/replicate/src/llm.ts:306
Parameters
params
LLMChatParamsStreaming<object, object>
Returns
Promise<AsyncIterable<ChatResponseChunk, any, any>>
Overrides
BaseLLM.chat
Call Signature
chat(params): Promise<ChatResponse<object>>
Defined in: providers/replicate/src/llm.ts:309
Parameters
params
LLMChatParamsNonStreaming<object, object>
Returns
Promise<ChatResponse<object>>
Overrides
BaseLLM.chat