MistralAI
Defined in: packages/providers/mistral/src/llm.ts:79
MistralAI LLM implementation
Extends
ToolCallLLM<ToolCallLLMMessageOptions>
Constructors
Constructor
new MistralAI(init?): MistralAI
Defined in: packages/providers/mistral/src/llm.ts:91
Parameters
init?
Partial<MistralAI>
Returns
MistralAI
Overrides
ToolCallLLM<ToolCallLLMMessageOptions>.constructor
Properties
model
model: "mistral-small-latest" | "mistral-large-latest" | "codestral-latest" | "pixtral-large-latest" | "ministral-8b-latest" | "ministral-3b-latest" | "mistral-tiny" | "mistral-small" | "mistral-medium" | "mistral-saba-latest" | "mistral-embed" | "mistral-moderation-latest"
Defined in: packages/providers/mistral/src/llm.ts:81
temperature
temperature: number
Defined in: packages/providers/mistral/src/llm.ts:82
topP
topP: number
Defined in: packages/providers/mistral/src/llm.ts:83
maxTokens?
optional maxTokens: number
Defined in: packages/providers/mistral/src/llm.ts:84
apiKey?
optional apiKey: string
Defined in: packages/providers/mistral/src/llm.ts:85
safeMode
safeMode: boolean
Defined in: packages/providers/mistral/src/llm.ts:86
randomSeed?
optional randomSeed: number
Defined in: packages/providers/mistral/src/llm.ts:87
Accessors
metadata
Get Signature
get metadata(): object
Defined in: packages/providers/mistral/src/llm.ts:102
Returns
object
model
model: "mistral-small-latest" | "mistral-large-latest" | "codestral-latest" | "pixtral-large-latest" | "ministral-8b-latest" | "ministral-3b-latest" | "mistral-tiny" | "mistral-small" | "mistral-medium" | "mistral-saba-latest" | "mistral-embed" | "mistral-moderation-latest"
temperature
temperature: number
topP
topP: number
maxTokens
maxTokens: undefined | number
contextWindow
contextWindow: number
tokenizer
tokenizer: undefined = undefined
structuredOutput
structuredOutput: boolean = false
Overrides
ToolCallLLM.metadata
supportToolCall
Get Signature
get supportToolCall(): boolean
Defined in: packages/providers/mistral/src/llm.ts:114
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
Methods
formatMessages()
formatMessages(messages): ({ role: "assistant"; content: string; toolCalls: object[]; toolCallId: undefined } | { toolCalls: undefined; role: "tool"; content: string; toolCallId: string } | { toolCalls: undefined; toolCallId: undefined; role: MessageType; content: string })[]
Defined in: packages/providers/mistral/src/llm.ts:118
Parameters
messages
ChatMessage<ToolCallLLMMessageOptions>[]
Returns
({ role: "assistant"; content: string; toolCalls: object[]; toolCallId: undefined } | { toolCalls: undefined; role: "tool"; content: string; toolCallId: string } | { toolCalls: undefined; toolCallId: undefined; role: MessageType; content: string })[]
toTool()
static
toTool(tool): Tool
Defined in: packages/providers/mistral/src/llm.ts:171
Parameters
tool
BaseTool
Returns
Tool
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>
Defined in: packages/providers/mistral/src/llm.ts:186
Parameters
params
LLMChatParamsStreaming
Returns
Promise<AsyncIterable<ChatResponseChunk, any, any>>
Overrides
ToolCallLLM.chat
Call Signature
chat(params): Promise<ChatResponse<object>>
Defined in: packages/providers/mistral/src/llm.ts:189
Parameters
params
LLMChatParamsNonStreaming<ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<object>>
Overrides
ToolCallLLM.chat
streamChat()
protected
streamChat(messages, tools?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Defined in: packages/providers/mistral/src/llm.ts:240
Parameters
messages
ChatMessage[]
tools?
BaseTool<any>[]
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>