OpenAI
Defined in: packages/providers/openai/src/llm.ts:55
Extends
ToolCallLLM<OpenAIAdditionalChatOptions>
Constructors
Constructor
new OpenAI(init?): OpenAI
Defined in: packages/providers/openai/src/llm.ts:85
Parameters
init?
Omit<Partial<OpenAI>, "session"> & object
Returns
OpenAI
Overrides
ToolCallLLM<OpenAIAdditionalChatOptions>.constructor
Properties
model
model: ChatModel | string & object
Defined in: packages/providers/openai/src/llm.ts:56
temperature
temperature: number
Defined in: packages/providers/openai/src/llm.ts:60
reasoningEffort?
optional
reasoningEffort: "high" | "low" | "medium"
Defined in: packages/providers/openai/src/llm.ts:61
topP
topP: number
Defined in: packages/providers/openai/src/llm.ts:62
maxTokens?
optional
maxTokens: number
Defined in: packages/providers/openai/src/llm.ts:63
additionalChatOptions?
optional
additionalChatOptions: OpenAIAdditionalChatOptions
Defined in: packages/providers/openai/src/llm.ts:64
apiKey?
optional
apiKey: string = undefined
Defined in: packages/providers/openai/src/llm.ts:67
baseURL?
optional
baseURL: string = undefined
Defined in: packages/providers/openai/src/llm.ts:68
maxRetries
maxRetries: number
Defined in: packages/providers/openai/src/llm.ts:69
timeout?
optional
timeout: number
Defined in: packages/providers/openai/src/llm.ts:70
additionalSessionOptions?
optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">
Defined in: packages/providers/openai/src/llm.ts:71
lazySession()
lazySession: () => Promise<LLMInstance>
Defined in: packages/providers/openai/src/llm.ts:76
Returns
Promise<LLMInstance>
Accessors
session
Get Signature
get session(): Promise<LLMInstance>
Defined in: packages/providers/openai/src/llm.ts:78
Returns
Promise<LLMInstance>
supportToolCall
Get Signature
get supportToolCall(): boolean
Defined in: packages/providers/openai/src/llm.ts:145
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
metadata
Get Signature
get metadata(): LLMMetadata & object
Defined in: packages/providers/openai/src/llm.ts:149
Returns
LLMMetadata & object
Overrides
ToolCallLLM.metadata
Methods
toOpenAIRole()
static
toOpenAIRole(messageType): ChatCompletionRole
Defined in: packages/providers/openai/src/llm.ts:165
Parameters
messageType
MessageType
Returns
ChatCompletionRole
toOpenAIMessage()
static
toOpenAIMessage(messages): ChatCompletionMessageParam[]
Defined in: packages/providers/openai/src/llm.ts:178
Parameters
messages
ChatMessage<ToolCallLLMMessageOptions>[]
Returns
ChatCompletionMessageParam[]
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Defined in: packages/providers/openai/src/llm.ts:227
Parameters
params
LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
ToolCallLLM.chat
Call Signature
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Defined in: packages/providers/openai/src/llm.ts:233
Parameters
params
LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
ToolCallLLM.chat
streamChat()
protected
streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Defined in: packages/providers/openai/src/llm.ts:331
Parameters
baseRequestParams
ChatCompletionCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
toTool()
static
toTool(tool): ChatCompletionTool
Defined in: packages/providers/openai/src/llm.ts:417
Parameters
tool
BaseTool
Returns
ChatCompletionTool