OpenAIResponses
Defined in: packages/providers/openai/src/responses.ts:44
Extends
ToolCallLLM<OpenAIResponsesChatOptions>
Constructors
Constructor
new OpenAIResponses(init?): OpenAIResponses
Defined in: packages/providers/openai/src/responses.ts:72
Parameters
init?: Omit<Partial<OpenAIResponses>, "session"> & object
Returns
OpenAIResponses
Overrides
ToolCallLLM<OpenAIResponsesChatOptions>.constructor
Properties
model
model: string
Defined in: packages/providers/openai/src/responses.ts:45
temperature
temperature: number
Defined in: packages/providers/openai/src/responses.ts:46
topP
topP: number
Defined in: packages/providers/openai/src/responses.ts:47
maxOutputTokens?
optional
maxOutputTokens: number
Defined in: packages/providers/openai/src/responses.ts:48
additionalChatOptions?
optional
additionalChatOptions: OpenAIResponsesChatOptions
Defined in: packages/providers/openai/src/responses.ts:49
reasoningEffort?
optional
reasoningEffort: "high" | "low" | "medium"
Defined in: packages/providers/openai/src/responses.ts:50
apiKey?
optional
apiKey: string
Defined in: packages/providers/openai/src/responses.ts:51
baseURL?
optional
baseURL: string
Defined in: packages/providers/openai/src/responses.ts:52
maxRetries
maxRetries: number
Defined in: packages/providers/openai/src/responses.ts:53
timeout?
optional
timeout: number
Defined in: packages/providers/openai/src/responses.ts:54
additionalSessionOptions?
optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">
Defined in: packages/providers/openai/src/responses.ts:55
lazySession()
lazySession: () => Promise<LLMInstance>
Defined in: packages/providers/openai/src/responses.ts:58
Returns
Promise<LLMInstance>
trackPreviousResponses
trackPreviousResponses: boolean
Defined in: packages/providers/openai/src/responses.ts:61
store
store: boolean
Defined in: packages/providers/openai/src/responses.ts:62
user
user: string
Defined in: packages/providers/openai/src/responses.ts:63
callMetadata
callMetadata: StoredValue
Defined in: packages/providers/openai/src/responses.ts:64
builtInTools
builtInTools: null | Tool[]
Defined in: packages/providers/openai/src/responses.ts:65
strict
strict: boolean
Defined in: packages/providers/openai/src/responses.ts:66
include
include: null | ResponseIncludable[]
Defined in: packages/providers/openai/src/responses.ts:67
instructions
instructions: string
Defined in: packages/providers/openai/src/responses.ts:68
previousResponseId
previousResponseId: null | string
Defined in: packages/providers/openai/src/responses.ts:69
truncation
truncation: null | "auto" | "disabled"
Defined in: packages/providers/openai/src/responses.ts:70
Accessors
session
Get Signature
get session(): Promise<LLMInstance>
Defined in: packages/providers/openai/src/responses.ts:123
Returns
Promise<LLMInstance>
supportToolCall
Get Signature
get supportToolCall(): boolean
Defined in: packages/providers/openai/src/responses.ts:130
Returns
boolean
Overrides
ToolCallLLM.supportToolCall
metadata
Get Signature
get metadata(): LLMMetadata & object
Defined in: packages/providers/openai/src/responses.ts:134
Returns
LLMMetadata & object
Overrides
ToolCallLLM.metadata
Methods
chat()
Call Signature
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Defined in: packages/providers/openai/src/responses.ts:497
Parameters
params: LLMChatParamsStreaming<OpenAIResponsesChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
ToolCallLLM.chat
Call Signature
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Defined in: packages/providers/openai/src/responses.ts:503
Parameters
params: LLMChatParamsNonStreaming<OpenAIResponsesChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Overrides
ToolCallLLM.chat
streamChat()
protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
Defined in: packages/providers/openai/src/responses.ts:586
Parameters
baseRequestParams
ResponseCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
toOpenAIResponsesRole()
toOpenAIResponsesRole(messageType): OpenAIResponsesRole
Defined in: packages/providers/openai/src/responses.ts:605
Parameters
messageType
MessageType
Returns
OpenAIResponsesRole
toOpenAIResponseMessage()
toOpenAIResponseMessage(message): ResponseInputItem | ResponseInputItem[]
Defined in: packages/providers/openai/src/responses.ts:728
Parameters
message: ChatMessage<ToolCallLLMMessageOptions>
Returns
ResponseInputItem | ResponseInputItem[]
toOpenAIResponseMessages()
toOpenAIResponseMessages(messages): ResponseInput
Defined in: packages/providers/openai/src/responses.ts:746
Parameters
messages: ChatMessage<ToolCallLLMMessageOptions>[]
Returns
ResponseInput
toResponsesTool()
toResponsesTool(tool): Tool
Defined in: packages/providers/openai/src/responses.ts:761
Parameters
tool
BaseTool
Returns
Tool