OpenAI
Defined in: providers/openai/src/llm.ts:187
Extends
ToolCallLLM<OpenAIAdditionalChatOptions>
Constructors
new OpenAI()
new OpenAI(init?): OpenAI
Defined in: providers/openai/src/llm.ts:216
Parameters
init?
Omit<Partial<OpenAI>, "session"> & object
Returns
OpenAI
Overrides
ToolCallLLM<OpenAIAdditionalChatOptions>.constructor
Properties
model
model: ChatModel | string & object
Defined in: providers/openai/src/llm.ts:188
temperature
temperature: number
Defined in: providers/openai/src/llm.ts:192
reasoningEffort?
optional
reasoningEffort: "low" | "medium" | "high"
Defined in: providers/openai/src/llm.ts:193
topP
topP: number
Defined in: providers/openai/src/llm.ts:194
maxTokens?
optional
maxTokens: number
Defined in: providers/openai/src/llm.ts:195
additionalChatOptions?
optional
additionalChatOptions: OpenAIAdditionalChatOptions
Defined in: providers/openai/src/llm.ts:196
apiKey?
optional
apiKey: string = undefined
Defined in: providers/openai/src/llm.ts:199
maxRetries
maxRetries: number
Defined in: providers/openai/src/llm.ts:200
timeout?
optional
timeout: number
Defined in: providers/openai/src/llm.ts:201
additionalSessionOptions?
optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">
Defined in: providers/openai/src/llm.ts:202
lazySession()
lazySession: () => Promise<LLMInstance>
Defined in: providers/openai/src/llm.ts:207
Returns
Promise<LLMInstance>
Accessors
session
Get Signature
get session(): Promise<LLMInstance>
Defined in: providers/openai/src/llm.ts:209
Returns
Promise<LLMInstance>
supportToolCall
Get Signature
get supportToolCall(): boolean
Defined in: providers/openai/src/llm.ts:272
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
metadata
Get Signature
get metadata(): LLMMetadata & object
Defined in: providers/openai/src/llm.ts:276
Returns
LLMMetadata & object
Overrides
ToolCallLLM.metadata
Methods
toOpenAIRole()
static
toOpenAIRole(messageType): ChatCompletionRole
Defined in: providers/openai/src/llm.ts:291
Parameters
messageType
MessageType
Returns
ChatCompletionRole
toOpenAIMessage()
static
toOpenAIMessage(messages): ChatCompletionMessageParam[]
Defined in: providers/openai/src/llm.ts:304
Parameters
messages
ChatMessage<ToolCallLLMMessageOptions>[]
Returns
ChatCompletionMessageParam[]
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Defined in: providers/openai/src/llm.ts:353
Parameters
params
LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
ToolCallLLM.chat
Call Signature
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Defined in: providers/openai/src/llm.ts:359
Parameters
params
LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
ToolCallLLM.chat
streamChat()
protected
streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Defined in: providers/openai/src/llm.ts:441
Parameters
baseRequestParams
ChatCompletionCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
toTool()
static
toTool(tool): ChatCompletionTool
Defined in: providers/openai/src/llm.ts:527
Parameters
tool
BaseTool<any>
Returns
ChatCompletionTool