ReplicateLLM
Defined in: packages/providers/replicate/src/llm.ts:107
Replicate LLM implementation used to run the Llama 2 and Llama 3 chat models hosted on Replicate.
Extends
BaseLLM
Constructors
Constructor
new ReplicateLLM(init?): ReplicateLLM
Defined in: packages/providers/replicate/src/llm.ts:115
Parameters
init?
Partial<ReplicateLLM> & object
Returns
ReplicateLLM
Overrides
BaseLLM.constructor
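A minimal construction sketch, assuming the class is exported from the @llamaindex/replicate package (inferred from the packages/providers/replicate path) and that a REPLICATE_API_TOKEN is available in the environment for the underlying session:

```typescript
import { ReplicateLLM } from "@llamaindex/replicate";

// All init fields are optional; anything omitted falls back to the class defaults.
const llm = new ReplicateLLM({
  model: "llama-3-8b-instruct",
  temperature: 0.5,
  topP: 1,
  maxTokens: 512,
});
```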
Properties
model
model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"
Defined in: packages/providers/replicate/src/llm.ts:108
chatStrategy
chatStrategy: ReplicateChatStrategy
Defined in: packages/providers/replicate/src/llm.ts:109
temperature
temperature: number
Defined in: packages/providers/replicate/src/llm.ts:110
topP
topP: number
Defined in: packages/providers/replicate/src/llm.ts:111
maxTokens?
optional maxTokens: number
Defined in: packages/providers/replicate/src/llm.ts:112
replicateSession
replicateSession: ReplicateSession
Defined in: packages/providers/replicate/src/llm.ts:113
Accessors
metadata
Get Signature
get metadata(): object
Defined in: packages/providers/replicate/src/llm.ts:140
Returns
object
model
model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"
temperature
temperature: number
topP
topP: number
maxTokens
maxTokens: undefined | number
contextWindow
contextWindow: number
tokenizer
tokenizer: undefined = undefined
structuredOutput
structuredOutput: boolean = false
Overrides
BaseLLM.metadata
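A short sketch of reading the accessor, assuming the same @llamaindex/replicate export as above; the getter simply exposes the configured settings plus the derived context window:

```typescript
import { ReplicateLLM } from "@llamaindex/replicate";

const llm = new ReplicateLLM({ model: "llama-3-70b-instruct" });

// Destructure the fields listed in the return type above.
const { model, temperature, topP, maxTokens, contextWindow } = llm.metadata;
console.log(model, temperature, topP, maxTokens, contextWindow);
```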
Methods
mapMessagesToPrompt()
mapMessagesToPrompt(messages): object
Defined in: packages/providers/replicate/src/llm.ts:152
Parameters
messages
ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined | MessageContent
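A hedged sketch of mapping chat messages to a single prompt string; the message objects assume the role/content shape of ChatMessage used elsewhere in LlamaIndexTS, and which underlying mapper is applied depends on the configured chatStrategy:

```typescript
import { ReplicateLLM } from "@llamaindex/replicate";

const llm = new ReplicateLLM({ model: "llama-3-8b-instruct" });

// Returns the flattened prompt plus any system prompt extracted from the messages.
const { prompt, systemPrompt } = llm.mapMessagesToPrompt([
  { role: "system", content: "You are a terse assistant." },
  { role: "user", content: "What does Replicate host?" },
]);
console.log(systemPrompt);
console.log(prompt);
```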
mapMessagesToPromptLlama3()
mapMessagesToPromptLlama3(messages): object
Defined in: packages/providers/replicate/src/llm.ts:178
Parameters
messages
ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined = undefined
mapMessagesToPromptA16Z()
mapMessagesToPromptA16Z(messages): object
Defined in: packages/providers/replicate/src/llm.ts:204
Parameters
messages
ChatMessage[]
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined = undefined
mapMessageTypeA16Z()
mapMessageTypeA16Z(messageType): string
Defined in: packages/providers/replicate/src/llm.ts:218
Parameters
messageType
MessageType
Returns
string
mapMessagesToPromptMeta()
mapMessagesToPromptMeta(messages, opts?): object
Defined in: packages/providers/replicate/src/llm.ts:231
Parameters
messages
ChatMessage[]
opts?
withBos?
boolean
replicate4Bit?
boolean
withNewlines?
boolean
Returns
object
prompt
prompt: string
systemPrompt
systemPrompt: undefined | MessageContent
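A sketch of the Meta-style mapping with explicit opts; the flag names come from the signature above, but their exact effect on the produced prompt text is not documented here, so treat the option values as illustrative:

```typescript
import { ReplicateLLM } from "@llamaindex/replicate";

const llm = new ReplicateLLM({ model: "Llama-2-70b-chat-4bit" });

// withBos / replicate4Bit toggle how the Llama 2 prompt is assembled.
const { prompt, systemPrompt } = llm.mapMessagesToPromptMeta(
  [
    { role: "system", content: "Answer in one sentence." },
    { role: "user", content: "Who released Llama 2?" },
  ],
  { withBos: true, replicate4Bit: true },
);
console.log(prompt, systemPrompt);
```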
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>
Defined in: packages/providers/replicate/src/llm.ts:307
Parameters
params
LLMChatParamsStreaming
Returns
Promise<AsyncIterable<ChatResponseChunk, any, any>>
Overrides
BaseLLM.chat
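A streaming sketch, assuming LLMChatParamsStreaming takes a messages array plus stream: true (as in the shared LlamaIndexTS LLM interface) and that each ChatResponseChunk exposes an incremental delta string:

```typescript
import { ReplicateLLM } from "@llamaindex/replicate";

const llm = new ReplicateLLM({ model: "llama-3-70b-instruct" });

const stream = await llm.chat({
  messages: [{ role: "user", content: "Write a haiku about rivers." }],
  stream: true,
});

// Each chunk carries the next piece of the generated text.
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```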
Call Signature
chat(params): Promise<ChatResponse<object>>
Defined in: packages/providers/replicate/src/llm.ts:310
Parameters
params
LLMChatParamsNonStreaming
Returns
Promise<ChatResponse<object>>
Overrides
BaseLLM.chat
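And a non-streaming sketch, assuming the resolved ChatResponse wraps the assistant reply as message.content, as in the base LLM interface:

```typescript
import { ReplicateLLM } from "@llamaindex/replicate";

const llm = new ReplicateLLM({ model: "llama-3-8b-instruct", maxTokens: 256 });

const response = await llm.chat({
  messages: [{ role: "user", content: "Summarize what Replicate does in one sentence." }],
});

// The reply content is a MessageContent value (a string for plain text responses).
console.log(response.message.content);
```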